src/hotspot/share/classfile/classFileParser.cpp

*** 53,62 **** --- 53,63 ---- #include "oops/klassVtable.hpp" #include "oops/metadata.hpp" #include "oops/method.hpp" #include "oops/oop.inline.hpp" #include "oops/symbol.hpp" + #include "oops/valueKlass.hpp" #include "prims/jvmtiExport.hpp" #include "prims/jvmtiThreadState.hpp" #include "runtime/arguments.hpp" #include "runtime/handles.inline.hpp" #include "runtime/javaCalls.hpp"
*** 120,129 **** --- 121,132 ---- #define JAVA_12_VERSION 56 #define JAVA_13_VERSION 57 + #define CONSTANT_CLASS_DESCRIPTORS 57 + void ClassFileParser::set_class_bad_constant_seen(short bad_constant) { assert((bad_constant == 19 || bad_constant == 20) && _major_version >= JAVA_9_VERSION, "Unexpected bad constant pool entry"); if (_bad_constant_seen == 0) _bad_constant_seen = bad_constant; }
*** 158,168 **** // Each of the following case guarantees one more byte in the stream // for the following tag or the access_flags following constant pool, // so we don't need bounds-check for reading tag. const u1 tag = cfs->get_u1_fast(); switch (tag) { ! case JVM_CONSTANT_Class : { cfs->guarantee_more(3, CHECK); // name_index, tag/access_flags const u2 name_index = cfs->get_u2_fast(); cp->klass_index_at_put(index, name_index); break; } --- 161,171 ---- // Each of the following case guarantees one more byte in the stream // for the following tag or the access_flags following constant pool, // so we don't need bounds-check for reading tag. const u1 tag = cfs->get_u1_fast(); switch (tag) { ! case JVM_CONSTANT_Class: { cfs->guarantee_more(3, CHECK); // name_index, tag/access_flags const u2 name_index = cfs->get_u2_fast(); cp->klass_index_at_put(index, name_index); break; }
*** 490,500 **** --- 493,510 ---- case JVM_CONSTANT_ClassIndex: { const int class_index = cp->klass_index_at(index); check_property(valid_symbol_at(class_index), "Invalid constant pool index %u in class file %s", class_index, CHECK); + + Symbol* const name = cp->symbol_at(class_index); + const unsigned int name_len = name->utf8_length(); + if (name->is_Q_signature()) { + cp->unresolved_qdescriptor_at_put(index, class_index, num_klasses++); + } else { cp->unresolved_klass_at_put(index, class_index, num_klasses++); + } break; } case JVM_CONSTANT_StringIndex: { const int string_index = cp->string_index_at(index); check_property(valid_symbol_at(string_index),
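Side note on the new branch above: whether a CONSTANT_Class entry is routed to unresolved_qdescriptor_at_put or unresolved_klass_at_put hinges on the Symbol being a Q-descriptor. A minimal standalone sketch of that shape test (hypothetical helper, not the actual Symbol::is_Q_signature implementation):

    #include <string>

    // A Q-descriptor has the form "Qpkg/Name;", e.g. "Qjava/lang/MyValue;".
    // Plain class entries carry an internal name with no wrapper.
    static bool looks_like_q_descriptor(const std::string& name) {
      return name.size() >= 3 && name.front() == 'Q' && name.back() == ';';
    }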
*** 1459,1473 **** --- 1469,1485 ---- STATIC_OOP, // Oops STATIC_BYTE, // Boolean, Byte, char STATIC_SHORT, // shorts STATIC_WORD, // ints STATIC_DOUBLE, // aligned long or double + STATIC_FLATTENABLE, // flattenable field NONSTATIC_OOP, NONSTATIC_BYTE, NONSTATIC_SHORT, NONSTATIC_WORD, NONSTATIC_DOUBLE, + NONSTATIC_FLATTENABLE, MAX_FIELD_ALLOCATION_TYPE, BAD_ALLOCATION_TYPE = -1 }; static FieldAllocationType _basic_type_to_atype[2 * (T_CONFLICT + 1)] = {
*** 1483,1498 **** NONSTATIC_SHORT, // T_SHORT = 9, NONSTATIC_WORD, // T_INT = 10, NONSTATIC_DOUBLE, // T_LONG = 11, NONSTATIC_OOP, // T_OBJECT = 12, NONSTATIC_OOP, // T_ARRAY = 13, ! BAD_ALLOCATION_TYPE, // T_VOID = 14, ! BAD_ALLOCATION_TYPE, // T_ADDRESS = 15, ! BAD_ALLOCATION_TYPE, // T_NARROWOOP = 16, ! BAD_ALLOCATION_TYPE, // T_METADATA = 17, ! BAD_ALLOCATION_TYPE, // T_NARROWKLASS = 18, ! BAD_ALLOCATION_TYPE, // T_CONFLICT = 19, BAD_ALLOCATION_TYPE, // 0 BAD_ALLOCATION_TYPE, // 1 BAD_ALLOCATION_TYPE, // 2 BAD_ALLOCATION_TYPE, // 3 STATIC_BYTE , // T_BOOLEAN = 4, --- 1495,1511 ---- NONSTATIC_SHORT, // T_SHORT = 9, NONSTATIC_WORD, // T_INT = 10, NONSTATIC_DOUBLE, // T_LONG = 11, NONSTATIC_OOP, // T_OBJECT = 12, NONSTATIC_OOP, // T_ARRAY = 13, ! NONSTATIC_OOP, // T_VALUETYPE = 14, ! BAD_ALLOCATION_TYPE, // T_VOID = 15, ! BAD_ALLOCATION_TYPE, // T_ADDRESS = 16, ! BAD_ALLOCATION_TYPE, // T_NARROWOOP = 17, ! BAD_ALLOCATION_TYPE, // T_METADATA = 18, ! BAD_ALLOCATION_TYPE, // T_NARROWKLASS = 19, ! BAD_ALLOCATION_TYPE, // T_CONFLICT = 20, BAD_ALLOCATION_TYPE, // 0 BAD_ALLOCATION_TYPE, // 1 BAD_ALLOCATION_TYPE, // 2 BAD_ALLOCATION_TYPE, // 3 STATIC_BYTE , // T_BOOLEAN = 4,
*** 1503,1524 **** STATIC_SHORT, // T_SHORT = 9, STATIC_WORD, // T_INT = 10, STATIC_DOUBLE, // T_LONG = 11, STATIC_OOP, // T_OBJECT = 12, STATIC_OOP, // T_ARRAY = 13, ! BAD_ALLOCATION_TYPE, // T_VOID = 14, ! BAD_ALLOCATION_TYPE, // T_ADDRESS = 15, ! BAD_ALLOCATION_TYPE, // T_NARROWOOP = 16, ! BAD_ALLOCATION_TYPE, // T_METADATA = 17, ! BAD_ALLOCATION_TYPE, // T_NARROWKLASS = 18, ! BAD_ALLOCATION_TYPE, // T_CONFLICT = 19, }; ! static FieldAllocationType basic_type_to_atype(bool is_static, BasicType type) { assert(type >= T_BOOLEAN && type < T_VOID, "only allowable values"); FieldAllocationType result = _basic_type_to_atype[type + (is_static ? (T_CONFLICT + 1) : 0)]; assert(result != BAD_ALLOCATION_TYPE, "bad type"); return result; } class ClassFileParser::FieldAllocationCount : public ResourceObj { public: --- 1516,1541 ---- STATIC_SHORT, // T_SHORT = 9, STATIC_WORD, // T_INT = 10, STATIC_DOUBLE, // T_LONG = 11, STATIC_OOP, // T_OBJECT = 12, STATIC_OOP, // T_ARRAY = 13, ! STATIC_OOP, // T_VALUETYPE = 14, ! BAD_ALLOCATION_TYPE, // T_VOID = 15, ! BAD_ALLOCATION_TYPE, // T_ADDRESS = 16, ! BAD_ALLOCATION_TYPE, // T_NARROWOOP = 17, ! BAD_ALLOCATION_TYPE, // T_METADATA = 18, ! BAD_ALLOCATION_TYPE, // T_NARROWKLASS = 19, ! BAD_ALLOCATION_TYPE, // T_CONFLICT = 20 }; ! static FieldAllocationType basic_type_to_atype(bool is_static, BasicType type, bool is_flattenable) { assert(type >= T_BOOLEAN && type < T_VOID, "only allowable values"); FieldAllocationType result = _basic_type_to_atype[type + (is_static ? (T_CONFLICT + 1) : 0)]; assert(result != BAD_ALLOCATION_TYPE, "bad type"); + if (is_flattenable) { + result = is_static ? STATIC_FLATTENABLE : NONSTATIC_FLATTENABLE; + } return result; } class ClassFileParser::FieldAllocationCount : public ResourceObj { public:
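The flattenable override above is easiest to see in isolation: the basic-type table still picks the static/non-static bucket, and the flattenable bit then reroutes the result. A simplified standalone model (enum values and names are illustrative, not the HotSpot ones):

    #include <cassert>

    enum AllocType {
      STATIC_OOP_T, STATIC_FLATTENABLE_T,
      NONSTATIC_OOP_T, NONSTATIC_FLATTENABLE_T,
      BAD_ALLOC_T = -1
    };

    // Mirrors the shape of basic_type_to_atype(is_static, type, is_flattenable):
    // look the bucket up first, then let the flattenable bit override it.
    static AllocType atype_sketch(AllocType table_result, bool is_static, bool is_flattenable) {
      assert(table_result != BAD_ALLOC_T && "bad type");
      if (is_flattenable) {
        return is_static ? STATIC_FLATTENABLE_T : NONSTATIC_FLATTENABLE_T;
      }
      return table_result;
    }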
*** 1528,1539 **** for (int i = 0; i < MAX_FIELD_ALLOCATION_TYPE; i++) { count[i] = 0; } } ! FieldAllocationType update(bool is_static, BasicType type) { ! FieldAllocationType atype = basic_type_to_atype(is_static, type); if (atype != BAD_ALLOCATION_TYPE) { // Make sure there is no overflow with injected fields. assert(count[atype] < 0xFFFF, "More than 65535 fields"); count[atype]++; } --- 1545,1556 ---- for (int i = 0; i < MAX_FIELD_ALLOCATION_TYPE; i++) { count[i] = 0; } } ! FieldAllocationType update(bool is_static, BasicType type, bool is_flattenable) { ! FieldAllocationType atype = basic_type_to_atype(is_static, type, is_flattenable); if (atype != BAD_ALLOCATION_TYPE) { // Make sure there is no overflow with injected fields. assert(count[atype] < 0xFFFF, "More than 65535 fields"); count[atype]++; }
*** 1543,1552 **** --- 1560,1570 ---- // Side-effects: populates the _fields, _fields_annotations, // _fields_type_annotations fields void ClassFileParser::parse_fields(const ClassFileStream* const cfs, bool is_interface, + bool is_value_type, FieldAllocationCount* const fac, ConstantPool* cp, const int cp_size, u2* const java_fields_count_ptr, TRAPS) {
*** 1565,1575 **** *java_fields_count_ptr = length; int num_injected = 0; const InjectedField* const injected = JavaClasses::get_injected(_class_name, &num_injected); ! const int total_fields = length + num_injected; // The field array starts with tuples of shorts // [access, name index, sig index, initial value index, byte offset]. // A generic signature slot only exists for field with generic // signature attribute. And the access flag is set with --- 1583,1594 ---- *java_fields_count_ptr = length; int num_injected = 0; const InjectedField* const injected = JavaClasses::get_injected(_class_name, &num_injected); ! ! const int total_fields = length + num_injected + (is_value_type ? 1 : 0); // The field array starts with tuples of shorts // [access, name index, sig index, initial value index, byte offset]. // A generic signature slot only exists for field with generic // signature attribute. And the access flag is set with
*** 1599,1611 **** int num_generic_signature = 0; for (int n = 0; n < length; n++) { // access_flags, name_index, descriptor_index, attributes_count cfs->guarantee_more(8, CHECK); AccessFlags access_flags; - const jint flags = cfs->get_u2_fast() & JVM_RECOGNIZED_FIELD_MODIFIERS; - verify_legal_field_modifiers(flags, is_interface, CHECK); access_flags.set_flags(flags); const u2 name_index = cfs->get_u2_fast(); check_property(valid_symbol_at(name_index), "Invalid constant pool index %u for field name in class file %s", --- 1618,1632 ---- int num_generic_signature = 0; for (int n = 0; n < length; n++) { // access_flags, name_index, descriptor_index, attributes_count cfs->guarantee_more(8, CHECK); + jint recognized_modifiers = JVM_RECOGNIZED_FIELD_MODIFIERS; + + const jint flags = cfs->get_u2_fast() & recognized_modifiers; + verify_legal_field_modifiers(flags, is_interface, is_value_type, CHECK); AccessFlags access_flags; access_flags.set_flags(flags); const u2 name_index = cfs->get_u2_fast(); check_property(valid_symbol_at(name_index), "Invalid constant pool index %u for field name in class file %s",
*** 1617,1626 **** --- 1638,1663 ---- check_property(valid_symbol_at(signature_index), "Invalid constant pool index %u for field signature in class file %s", signature_index, CHECK); const Symbol* const sig = cp->symbol_at(signature_index); verify_legal_field_signature(name, sig, CHECK); + assert(!access_flags.is_flattenable(), "ACC_FLATTENABLE should have been filtered out"); + if (sig->is_Q_signature()) { + // assert(_major_version >= CONSTANT_CLASS_DESCRIPTORS, "Q-descriptors are only supported in recent classfiles"); + access_flags.set_is_flattenable(); + } + if (access_flags.is_flattenable()) { + // Array flattenability cannot be specified. Arrays of value classes are + // are always flattenable. Arrays of other classes are not flattenable. + if (sig->utf8_length() > 1 && sig->char_at(0) == '[') { + classfile_parse_error( + "Field \"%s\" with signature \"%s\" in class file %s is invalid." + " ACC_FLATTENABLE cannot be specified for an array", + name->as_C_string(), sig->as_klass_external_name(), CHECK); + } + _has_flattenable_fields = true; + } u2 constantvalue_index = 0; bool is_synthetic = false; u2 generic_signature_index = 0; const bool is_static = access_flags.is_static();
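The added field-signature handling boils down to two rules: a Q-typed field is implicitly flattenable, and flattenability may never be requested for an array (arrays of value classes are always flattenable, other arrays never are). A standalone sketch of that decision, with hypothetical names, where 'flattenable_flag' stands in for an ACC_FLATTENABLE bit already present on the field:

    #include <stdexcept>
    #include <string>

    static bool resolve_flattenable(bool flattenable_flag, const std::string& sig) {
      bool flattenable = flattenable_flag ||
                         (sig.size() >= 3 && sig.front() == 'Q' && sig.back() == ';');
      if (flattenable && sig.size() > 1 && sig[0] == '[') {
        // Array flattenability cannot be specified per field.
        throw std::runtime_error("ACC_FLATTENABLE cannot be specified for an array");
      }
      return flattenable;
    }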
*** 1676,1686 **** signature_index, constantvalue_index); const BasicType type = cp->basic_type_for_signature_at(signature_index); // Remember how many oops we encountered and compute allocation type ! const FieldAllocationType atype = fac->update(is_static, type); field->set_allocation_type(atype); // After field is initialized with type, we can augment it with aux info if (parsed_annotations.has_any_annotations()) parsed_annotations.apply_to(field); --- 1713,1723 ---- signature_index, constantvalue_index); const BasicType type = cp->basic_type_for_signature_at(signature_index); // Remember how many oops we encountered and compute allocation type ! const FieldAllocationType atype = fac->update(is_static, type, access_flags.is_flattenable()); field->set_allocation_type(atype); // After field is initialized with type, we can augment it with aux info if (parsed_annotations.has_any_annotations()) parsed_annotations.apply_to(field);
*** 1717,1732 **** 0); const BasicType type = FieldType::basic_type(injected[n].signature()); // Remember how many oops we encountered and compute allocation type ! const FieldAllocationType atype = fac->update(false, type); field->set_allocation_type(atype); index++; } } assert(NULL == _fields, "invariant"); _fields = MetadataFactory::new_array<u2>(_loader_data, index * FieldInfo::field_slots + num_generic_signature, --- 1754,1782 ---- 0); const BasicType type = FieldType::basic_type(injected[n].signature()); // Remember how many oops we encountered and compute allocation type ! const FieldAllocationType atype = fac->update(false, type, false); field->set_allocation_type(atype); index++; } } + if (is_value_type) { + index = length + num_injected; + FieldInfo* const field = FieldInfo::from_field_array(fa, index); + field->initialize(JVM_ACC_FIELD_INTERNAL | JVM_ACC_STATIC, + vmSymbols::default_value_name_enum, + vmSymbols::java_lang_Object_enum, + 0); + const BasicType type = FieldType::basic_type(vmSymbols::object_signature()); + const FieldAllocationType atype = fac->update(true, type, false); + field->set_allocation_type(atype); + index++; + } + assert(NULL == _fields, "invariant"); _fields = MetadataFactory::new_array<u2>(_loader_data, index * FieldInfo::field_slots + num_generic_signature,
*** 2346,2355 **** --- 2396,2406 ---- // from the method back up to the containing klass. These flag values // are added to klass's access_flags. Method* ClassFileParser::parse_method(const ClassFileStream* const cfs, bool is_interface, + bool is_value_type, const ConstantPool* cp, AccessFlags* const promoted_flags, TRAPS) { assert(cfs != NULL, "invariant"); assert(cp != NULL, "invariant");
*** 2386,2400 **** flags &= JVM_ACC_STATIC | JVM_ACC_STRICT; } else { classfile_parse_error("Method <clinit> is not static in class file %s", CHECK_NULL); } } else { ! verify_legal_method_modifiers(flags, is_interface, name, CHECK_NULL); } ! if (name == vmSymbols::object_initializer_name() && is_interface) { classfile_parse_error("Interface cannot have a method named <init>, class file %s", CHECK_NULL); } int args_size = -1; // only used when _need_verify is true if (_need_verify) { args_size = ((flags & JVM_ACC_STATIC) ? 0 : 1) + --- 2437,2457 ---- flags &= JVM_ACC_STATIC | JVM_ACC_STRICT; } else { classfile_parse_error("Method <clinit> is not static in class file %s", CHECK_NULL); } } else { ! verify_legal_method_modifiers(flags, is_interface, is_value_type, name, CHECK_NULL); } ! if (name == vmSymbols::object_initializer_name()) { ! if (is_interface) { classfile_parse_error("Interface cannot have a method named <init>, class file %s", CHECK_NULL); + /* TBD: uncomment when javac stops generating <init>() for value types. + } else if (is_value_type) { + classfile_parse_error("Value Type cannot have a method named <init>, class file %s", CHECK_NULL); + */ + } } int args_size = -1; // only used when _need_verify is true if (_need_verify) { args_size = ((flags & JVM_ACC_STATIC) ? 0 : 1) +
*** 2961,2970 **** --- 3018,3028 ---- // from the methods back up to the containing klass. These flag values // are added to klass's access_flags. // Side-effects: populates the _methods field in the parser void ClassFileParser::parse_methods(const ClassFileStream* const cfs, bool is_interface, + bool is_value_type, AccessFlags* promoted_flags, bool* has_final_method, bool* declares_nonstatic_concrete_methods, TRAPS) { assert(cfs != NULL, "invariant");
*** 2985,2994 **** --- 3043,3053 ---- CHECK); for (int index = 0; index < length; index++) { Method* method = parse_method(cfs, is_interface, + is_value_type, _cp, promoted_flags, CHECK); if (method->is_final()) {
*** 3177,3194 **** inner_name_index, CHECK_0); if (_need_verify) { guarantee_property(inner_class_info_index != outer_class_info_index, "Class is both outer and inner class in class file %s", CHECK_0); } ! // Access flags ! jint flags; // JVM_ACC_MODULE is defined in JDK-9 and later. if (_major_version >= JAVA_9_VERSION) { ! flags = cfs->get_u2_fast() & (RECOGNIZED_INNER_CLASS_MODIFIERS | JVM_ACC_MODULE); ! } else { ! flags = cfs->get_u2_fast() & RECOGNIZED_INNER_CLASS_MODIFIERS; } if ((flags & JVM_ACC_INTERFACE) && _major_version < JAVA_6_VERSION) { // Set abstract bit for old class files for backward compatibility flags |= JVM_ACC_ABSTRACT; } verify_legal_class_modifiers(flags, CHECK_0); --- 3236,3259 ---- inner_name_index, CHECK_0); if (_need_verify) { guarantee_property(inner_class_info_index != outer_class_info_index, "Class is both outer and inner class in class file %s", CHECK_0); } ! ! jint recognized_modifiers = RECOGNIZED_INNER_CLASS_MODIFIERS; // JVM_ACC_MODULE is defined in JDK-9 and later. if (_major_version >= JAVA_9_VERSION) { ! recognized_modifiers |= JVM_ACC_MODULE; } + // JVM_ACC_VALUE is defined for class file version 55 and later + if (supports_value_types()) { + recognized_modifiers |= JVM_ACC_VALUE; + } + + // Access flags + jint flags = cfs->get_u2_fast() & recognized_modifiers; + if ((flags & JVM_ACC_INTERFACE) && _major_version < JAVA_6_VERSION) { // Set abstract bit for old class files for backward compatibility flags |= JVM_ACC_ABSTRACT; } verify_legal_class_modifiers(flags, CHECK_0);
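The refactoring above replaces the two-way branch with a mask that is grown per class-file version before being applied to the raw flags. A standalone sketch of that pattern (the base mask and the ACC_VALUE bit below are placeholders, not the real JVM_ACC_* definitions):

    #include <cstdint>

    static const uint32_t RECOGNIZED_BASE = 0x7631;  // placeholder for RECOGNIZED_INNER_CLASS_MODIFIERS
    static const uint32_t ACC_MODULE_BIT  = 0x8000;
    static const uint32_t ACC_VALUE_BIT   = 0x0100;  // hypothetical value for ACC_VALUE

    // Grow the recognized-modifier mask according to what the class-file
    // version supports, then drop unrecognized bits from the raw flags word.
    static uint32_t mask_inner_class_flags(uint32_t raw_flags, int major_version,
                                           bool supports_value_types) {
      uint32_t recognized = RECOGNIZED_BASE;
      if (major_version >= 53 /* JAVA_9_VERSION */) recognized |= ACC_MODULE_BIT;
      if (supports_value_types)                     recognized |= ACC_VALUE_BIT;
      return raw_flags & recognized;
    }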
*** 3387,3396 **** --- 3452,3463 ---- bool runtime_invisible_type_annotations_exists = false; bool runtime_invisible_annotations_exists = false; bool parsed_source_debug_ext_annotations_exist = false; const u1* inner_classes_attribute_start = NULL; u4 inner_classes_attribute_length = 0; + const u1* value_types_attribute_start = NULL; + u4 value_types_attribute_length = 0; u2 enclosing_method_class_index = 0; u2 enclosing_method_method_index = 0; const u1* nest_members_attribute_start = NULL; u4 nest_members_attribute_length = 0;
*** 3736,3746 **** TRAPS) { assert(cp != NULL, "invariant"); const InstanceKlass* super_klass = NULL; if (super_class_index == 0) { ! check_property(_class_name == vmSymbols::java_lang_Object(), "Invalid superclass index %u in class file %s", super_class_index, CHECK_NULL); } else { check_property(valid_klass_reference_at(super_class_index), --- 3803,3814 ---- TRAPS) { assert(cp != NULL, "invariant"); const InstanceKlass* super_klass = NULL; if (super_class_index == 0) { ! check_property(_class_name == vmSymbols::java_lang_Object() ! || (_access_flags.get_flags() & JVM_ACC_VALUE), "Invalid superclass index %u in class file %s", super_class_index, CHECK_NULL); } else { check_property(valid_klass_reference_at(super_class_index),
*** 3763,3805 **** } } return super_klass; } - static unsigned int compute_oop_map_count(const InstanceKlass* super, - unsigned int nonstatic_oop_map_count, - int first_nonstatic_oop_offset) { - - unsigned int map_count = - NULL == super ? 0 : super->nonstatic_oop_map_count(); - if (nonstatic_oop_map_count > 0) { - // We have oops to add to map - if (map_count == 0) { - map_count = nonstatic_oop_map_count; - } - else { - // Check whether we should add a new map block or whether the last one can - // be extended - const OopMapBlock* const first_map = super->start_of_nonstatic_oop_maps(); - const OopMapBlock* const last_map = first_map + map_count - 1; - - const int next_offset = last_map->offset() + last_map->count() * heapOopSize; - if (next_offset == first_nonstatic_oop_offset) { - // There is no gap bettwen superklass's last oop field and first - // local oop field, merge maps. - nonstatic_oop_map_count -= 1; - } - else { - // Superklass didn't end with a oop field, add extra maps - assert(next_offset < first_nonstatic_oop_offset, "just checking"); - } - map_count += nonstatic_oop_map_count; - } - } - return map_count; - } - #ifndef PRODUCT static void print_field_layout(const Symbol* name, Array<u2>* fields, const constantPoolHandle& cp, int instance_size, --- 3831,3840 ----
*** 3836,3855 **** #endif // Values needed for oopmap and InstanceKlass creation class ClassFileParser::FieldLayoutInfo : public ResourceObj { public: ! int* nonstatic_oop_offsets; ! unsigned int* nonstatic_oop_counts; ! unsigned int nonstatic_oop_map_count; ! unsigned int total_oop_map_count; int instance_size; int nonstatic_field_size; int static_field_size; bool has_nonstatic_fields; }; // Layout fields and fill in FieldLayoutInfo. Could use more refactoring! void ClassFileParser::layout_fields(ConstantPool* cp, const FieldAllocationCount* fac, const ClassAnnotationCollector* parsed_annotations, FieldLayoutInfo* info, --- 3871,4032 ---- #endif // Values needed for oopmap and InstanceKlass creation class ClassFileParser::FieldLayoutInfo : public ResourceObj { public: ! OopMapBlocksBuilder* oop_map_blocks; int instance_size; int nonstatic_field_size; int static_field_size; bool has_nonstatic_fields; }; + // Utility to collect and compact oop maps during layout + class ClassFileParser::OopMapBlocksBuilder : public ResourceObj { + public: + OopMapBlock* nonstatic_oop_maps; + unsigned int nonstatic_oop_map_count; + unsigned int max_nonstatic_oop_maps; + + public: + OopMapBlocksBuilder(unsigned int max_blocks, TRAPS) { + max_nonstatic_oop_maps = max_blocks; + nonstatic_oop_map_count = 0; + if (max_blocks == 0) { + nonstatic_oop_maps = NULL; + } else { + nonstatic_oop_maps = NEW_RESOURCE_ARRAY_IN_THREAD( + THREAD, OopMapBlock, max_nonstatic_oop_maps); + memset(nonstatic_oop_maps, 0, sizeof(OopMapBlock) * max_blocks); + } + } + + OopMapBlock* last_oop_map() const { + assert(nonstatic_oop_map_count > 0, "Has no oop maps"); + return nonstatic_oop_maps + (nonstatic_oop_map_count - 1); + } + + // addition of super oop maps + void initialize_inherited_blocks(OopMapBlock* blocks, unsigned int nof_blocks) { + assert(nof_blocks && nonstatic_oop_map_count == 0 && + nof_blocks <= max_nonstatic_oop_maps, "invariant"); + + memcpy(nonstatic_oop_maps, blocks, sizeof(OopMapBlock) * nof_blocks); + nonstatic_oop_map_count += nof_blocks; + } + + // collection of oops + void add(int offset, int count) { + if (nonstatic_oop_map_count == 0) { + nonstatic_oop_map_count++; + } + OopMapBlock* nonstatic_oop_map = last_oop_map(); + if (nonstatic_oop_map->count() == 0) { // Unused map, set it up + nonstatic_oop_map->set_offset(offset); + nonstatic_oop_map->set_count(count); + } else if (nonstatic_oop_map->is_contiguous(offset)) { // contiguous, add + nonstatic_oop_map->increment_count(count); + } else { // Need a new one... + nonstatic_oop_map_count++; + assert(nonstatic_oop_map_count <= max_nonstatic_oop_maps, "range check"); + nonstatic_oop_map = last_oop_map(); + nonstatic_oop_map->set_offset(offset); + nonstatic_oop_map->set_count(count); + } + } + + // general purpose copy, e.g. into allocated instanceKlass + void copy(OopMapBlock* dst) { + if (nonstatic_oop_map_count != 0) { + memcpy(dst, nonstatic_oop_maps, sizeof(OopMapBlock) * nonstatic_oop_map_count); + } + } + + // Sort and compact adjacent blocks + void compact(TRAPS) { + if (nonstatic_oop_map_count <= 1) { + return; + } + /* + * Since field layout sneeks in oops before values, we will be able to condense + * blocks. There is potential to compact between super, own refs and values + * containing refs. + * + * Currently compaction is slightly limited due to values being 8 byte aligned. + * This may well change: FixMe if doesn't, the code below is fairly general purpose + * and maybe it doesn't need to be. 
+ */ + qsort(nonstatic_oop_maps, nonstatic_oop_map_count, sizeof(OopMapBlock), + (_sort_Fn)OopMapBlock::compare_offset); + if (nonstatic_oop_map_count < 2) { + return; + } + + //Make a temp copy, and iterate through and copy back into the orig + ResourceMark rm(THREAD); + OopMapBlock* oop_maps_copy = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, OopMapBlock, + nonstatic_oop_map_count); + OopMapBlock* oop_maps_copy_end = oop_maps_copy + nonstatic_oop_map_count; + copy(oop_maps_copy); + OopMapBlock* nonstatic_oop_map = nonstatic_oop_maps; + unsigned int new_count = 1; + oop_maps_copy++; + while(oop_maps_copy < oop_maps_copy_end) { + assert(nonstatic_oop_map->offset() < oop_maps_copy->offset(), "invariant"); + if (nonstatic_oop_map->is_contiguous(oop_maps_copy->offset())) { + nonstatic_oop_map->increment_count(oop_maps_copy->count()); + } else { + nonstatic_oop_map++; + new_count++; + nonstatic_oop_map->set_offset(oop_maps_copy->offset()); + nonstatic_oop_map->set_count(oop_maps_copy->count()); + } + oop_maps_copy++; + } + assert(new_count <= nonstatic_oop_map_count, "end up with more maps after compact() ?"); + nonstatic_oop_map_count = new_count; + } + + void print_on(outputStream* st) const { + st->print_cr(" OopMapBlocks: %3d /%3d", nonstatic_oop_map_count, max_nonstatic_oop_maps); + if (nonstatic_oop_map_count > 0) { + OopMapBlock* map = nonstatic_oop_maps; + OopMapBlock* last_map = last_oop_map(); + assert(map <= last_map, "Last less than first"); + while (map <= last_map) { + st->print_cr(" Offset: %3d -%3d Count: %3d", map->offset(), + map->offset() + map->offset_span() - heapOopSize, map->count()); + map++; + } + } + } + + void print_value_on(outputStream* st) const { + print_on(st); + } + + }; + + void ClassFileParser::throwValueTypeLimitation(THREAD_AND_LOCATION_DECL, + const char* msg, + const Symbol* name, + const Symbol* sig) const { + + ResourceMark rm(THREAD); + if (name == NULL || sig == NULL) { + Exceptions::fthrow(THREAD_AND_LOCATION_ARGS, + vmSymbols::java_lang_ClassFormatError(), + "class: %s - %s", _class_name->as_C_string(), msg); + } + else { + Exceptions::fthrow(THREAD_AND_LOCATION_ARGS, + vmSymbols::java_lang_ClassFormatError(), + "\"%s\" sig: \"%s\" class: %s - %s", name->as_C_string(), sig->as_C_string(), + _class_name->as_C_string(), msg); + } + } + // Layout fields and fill in FieldLayoutInfo. Could use more refactoring! void ClassFileParser::layout_fields(ConstantPool* cp, const FieldAllocationCount* fac, const ClassAnnotationCollector* parsed_annotations, FieldLayoutInfo* info,
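The core of OopMapBlocksBuilder::add() above is the contiguity test: an oop that starts exactly where the previous block ends extends that block, anything else opens a new one. A compact standalone model of just that merging behaviour (not the HotSpot class):

    #include <vector>

    struct Block { int offset; int count; };

    struct OopMapSketch {
      std::vector<Block> blocks;
      int oop_size;                       // stand-in for heapOopSize

      explicit OopMapSketch(int oop_size) : oop_size(oop_size) {}

      void add(int offset, int count) {
        if (!blocks.empty() &&
            blocks.back().offset + blocks.back().count * oop_size == offset) {
          blocks.back().count += count;       // contiguous with the last block: extend it
        } else {
          blocks.push_back({offset, count});  // gap: start a new block
        }
      }
    };

With 8-byte oops, add(16, 1); add(24, 1); add(40, 1) ends up as two blocks, {16, 2} and {40, 1}; compact() restores the same invariant after sorting blocks collected out of order.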
*** 3858,3867 **** --- 4035,4050 ---- assert(cp != NULL, "invariant"); // Field size and offset computation int nonstatic_field_size = _super_klass == NULL ? 0 : _super_klass->nonstatic_field_size(); + int next_nonstatic_valuetype_offset = 0; + int first_nonstatic_valuetype_offset = 0; + + // Fields that are value types are handled differently depending on whether they are static or not: + // - static fields are oops + // - non-static fields are embedded // Count the contended fields by type. // // We ignore static fields, because @Contended is not supported for them. // The layout code below will also ignore the static fields.
*** 3878,3889 **** } // Calculate the starting byte offsets int next_static_oop_offset = InstanceMirrorKlass::offset_of_static_fields(); int next_static_double_offset = next_static_oop_offset + ! ((fac->count[STATIC_OOP]) * heapOopSize); if (fac->count[STATIC_DOUBLE]) { next_static_double_offset = align_up(next_static_double_offset, BytesPerLong); } int next_static_word_offset = next_static_double_offset + --- 4061,4073 ---- } // Calculate the starting byte offsets int next_static_oop_offset = InstanceMirrorKlass::offset_of_static_fields(); + // Value types in static fields are not embedded, they are handled with oops int next_static_double_offset = next_static_oop_offset + ! ((fac->count[STATIC_OOP] + fac->count[STATIC_FLATTENABLE]) * heapOopSize); if (fac->count[STATIC_DOUBLE]) { next_static_double_offset = align_up(next_static_double_offset, BytesPerLong); } int next_static_word_offset = next_static_double_offset +
*** 3894,3950 **** ((fac->count[STATIC_SHORT]) * BytesPerShort); int nonstatic_fields_start = instanceOopDesc::base_offset_in_bytes() + nonstatic_field_size * heapOopSize; int next_nonstatic_field_offset = nonstatic_fields_start; const bool is_contended_class = parsed_annotations->is_contended(); // Class is contended, pad before all the fields if (is_contended_class) { next_nonstatic_field_offset += ContendedPaddingWidth; } // Compute the non-contended fields count. // The packing code below relies on these counts to determine if some field // can be squeezed into the alignment gap. Contended fields are obviously // exempt from that. unsigned int nonstatic_double_count = fac->count[NONSTATIC_DOUBLE] - fac_contended.count[NONSTATIC_DOUBLE]; unsigned int nonstatic_word_count = fac->count[NONSTATIC_WORD] - fac_contended.count[NONSTATIC_WORD]; unsigned int nonstatic_short_count = fac->count[NONSTATIC_SHORT] - fac_contended.count[NONSTATIC_SHORT]; unsigned int nonstatic_byte_count = fac->count[NONSTATIC_BYTE] - fac_contended.count[NONSTATIC_BYTE]; unsigned int nonstatic_oop_count = fac->count[NONSTATIC_OOP] - fac_contended.count[NONSTATIC_OOP]; // Total non-static fields count, including every contended field unsigned int nonstatic_fields_count = fac->count[NONSTATIC_DOUBLE] + fac->count[NONSTATIC_WORD] + fac->count[NONSTATIC_SHORT] + fac->count[NONSTATIC_BYTE] + ! fac->count[NONSTATIC_OOP]; const bool super_has_nonstatic_fields = (_super_klass != NULL && _super_klass->has_nonstatic_fields()); const bool has_nonstatic_fields = super_has_nonstatic_fields || (nonstatic_fields_count != 0); // Prepare list of oops for oop map generation. // // "offset" and "count" lists are describing the set of contiguous oop // regions. offset[i] is the start of the i-th region, which then has // count[i] oops following. Before we know how many regions are required, // we pessimistically allocate the maps to fit all the oops into the // distinct regions. // ! // TODO: We add +1 to always allocate non-zero resource arrays; we need ! // to figure out if we still need to do this. ! unsigned int nonstatic_oop_map_count = 0; ! unsigned int max_nonstatic_oop_maps = fac->count[NONSTATIC_OOP] + 1; ! ! int* nonstatic_oop_offsets = NEW_RESOURCE_ARRAY_IN_THREAD( ! THREAD, int, max_nonstatic_oop_maps); ! unsigned int* const nonstatic_oop_counts = NEW_RESOURCE_ARRAY_IN_THREAD( ! 
THREAD, unsigned int, max_nonstatic_oop_maps); int first_nonstatic_oop_offset = 0; // will be set for first oop field bool compact_fields = CompactFields; int allocation_style = FieldsAllocationStyle; --- 4078,4235 ---- ((fac->count[STATIC_SHORT]) * BytesPerShort); int nonstatic_fields_start = instanceOopDesc::base_offset_in_bytes() + nonstatic_field_size * heapOopSize; + // First field of value types is aligned on a long boundary in order to ease + // in-lining of value types (with header removal) in packed arrays and + // flatten value types + int initial_value_type_padding = 0; + if (is_value_type()) { + int old = nonstatic_fields_start; + nonstatic_fields_start = align_up(nonstatic_fields_start, BytesPerLong); + initial_value_type_padding = nonstatic_fields_start - old; + } + int next_nonstatic_field_offset = nonstatic_fields_start; const bool is_contended_class = parsed_annotations->is_contended(); // Class is contended, pad before all the fields if (is_contended_class) { next_nonstatic_field_offset += ContendedPaddingWidth; } + // Temporary value types restrictions + if (is_value_type()) { + if (is_contended_class) { + throwValueTypeLimitation(THREAD_AND_LOCATION, "Value Types do not support @Contended annotation yet"); + return; + } + } + // Compute the non-contended fields count. // The packing code below relies on these counts to determine if some field // can be squeezed into the alignment gap. Contended fields are obviously // exempt from that. unsigned int nonstatic_double_count = fac->count[NONSTATIC_DOUBLE] - fac_contended.count[NONSTATIC_DOUBLE]; unsigned int nonstatic_word_count = fac->count[NONSTATIC_WORD] - fac_contended.count[NONSTATIC_WORD]; unsigned int nonstatic_short_count = fac->count[NONSTATIC_SHORT] - fac_contended.count[NONSTATIC_SHORT]; unsigned int nonstatic_byte_count = fac->count[NONSTATIC_BYTE] - fac_contended.count[NONSTATIC_BYTE]; unsigned int nonstatic_oop_count = fac->count[NONSTATIC_OOP] - fac_contended.count[NONSTATIC_OOP]; + int static_value_type_count = 0; + int nonstatic_value_type_count = 0; + int* nonstatic_value_type_indexes = NULL; + Klass** nonstatic_value_type_klasses = NULL; + unsigned int value_type_oop_map_count = 0; + int not_flattened_value_types = 0; + + int max_nonstatic_value_type = fac->count[NONSTATIC_FLATTENABLE] + 1; + + nonstatic_value_type_indexes = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, int, + max_nonstatic_value_type); + for (int i = 0; i < max_nonstatic_value_type; i++) { + nonstatic_value_type_indexes[i] = -1; + } + nonstatic_value_type_klasses = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, Klass*, + max_nonstatic_value_type); + + for (AllFieldStream fs(_fields, _cp); !fs.done(); fs.next()) { + if (fs.allocation_type() == STATIC_FLATTENABLE) { + // Pre-resolve the flattenable field and check for value type circularity + // issues. Note that super-class circularity checks are not needed here + // because flattenable fields can only be in value types and value types + // only have java.lang.Object as their super class. + // Also, note that super-interface circularity checks are not needed + // because interfaces cannot be value types. 
+ ResourceMark rm; + if (!fs.signature()->is_Q_signature()) { + THROW(vmSymbols::java_lang_ClassFormatError()); + } + Klass* klass = + SystemDictionary::resolve_flattenable_field_or_fail(&fs, + Handle(THREAD, _loader_data->class_loader()), + _protection_domain, true, CHECK); + assert(klass != NULL, "Sanity check"); + if (!klass->access_flags().is_value_type()) { + THROW(vmSymbols::java_lang_IncompatibleClassChangeError()); + } + static_value_type_count++; + } else if (fs.allocation_type() == NONSTATIC_FLATTENABLE) { + // Pre-resolve the flattenable field and check for value type circularity issues. + ResourceMark rm; + if (!fs.signature()->is_Q_signature()) { + THROW(vmSymbols::java_lang_ClassFormatError()); + } + Klass* klass = + SystemDictionary::resolve_flattenable_field_or_fail(&fs, + Handle(THREAD, _loader_data->class_loader()), + _protection_domain, true, CHECK); + assert(klass != NULL, "Sanity check"); + if (!klass->access_flags().is_value_type()) { + THROW(vmSymbols::java_lang_IncompatibleClassChangeError()); + } + ValueKlass* vk = ValueKlass::cast(klass); + // Conditions to apply flattening or not should be defined in a single place + if ((ValueFieldMaxFlatSize < 0) || (vk->size_helper() * HeapWordSize) <= ValueFieldMaxFlatSize) { + nonstatic_value_type_indexes[nonstatic_value_type_count] = fs.index(); + nonstatic_value_type_klasses[nonstatic_value_type_count] = klass; + nonstatic_value_type_count++; + + ValueKlass* vklass = ValueKlass::cast(klass); + if (vklass->contains_oops()) { + value_type_oop_map_count += vklass->nonstatic_oop_map_count(); + } + fs.set_flattened(true); + } else { + not_flattened_value_types++; + fs.set_flattened(false); + } + } + } + + // Adjusting non_static_oop_count to take into account not flattened value types; + nonstatic_oop_count += not_flattened_value_types; + // Total non-static fields count, including every contended field unsigned int nonstatic_fields_count = fac->count[NONSTATIC_DOUBLE] + fac->count[NONSTATIC_WORD] + fac->count[NONSTATIC_SHORT] + fac->count[NONSTATIC_BYTE] + ! fac->count[NONSTATIC_OOP] + fac->count[NONSTATIC_FLATTENABLE]; const bool super_has_nonstatic_fields = (_super_klass != NULL && _super_klass->has_nonstatic_fields()); const bool has_nonstatic_fields = super_has_nonstatic_fields || (nonstatic_fields_count != 0); + const bool has_nonstatic_value_fields = nonstatic_value_type_count > 0; + if (is_value_type() && (!has_nonstatic_fields)) { + // There are a number of fixes required throughout the type system and JIT + throwValueTypeLimitation(THREAD_AND_LOCATION, "Value Types do not support zero instance size yet"); + return; + } // Prepare list of oops for oop map generation. // // "offset" and "count" lists are describing the set of contiguous oop // regions. offset[i] is the start of the i-th region, which then has // count[i] oops following. Before we know how many regions are required, // we pessimistically allocate the maps to fit all the oops into the // distinct regions. // ! int super_oop_map_count = (_super_klass == NULL) ? 0 :_super_klass->nonstatic_oop_map_count(); ! int max_oop_map_count = ! super_oop_map_count + ! fac->count[NONSTATIC_OOP] + ! value_type_oop_map_count + ! not_flattened_value_types; ! ! OopMapBlocksBuilder* nonstatic_oop_maps = new OopMapBlocksBuilder(max_oop_map_count, THREAD); ! if (super_oop_map_count > 0) { ! nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(), ! _super_klass->nonstatic_oop_map_count()); ! 
} int first_nonstatic_oop_offset = 0; // will be set for first oop field bool compact_fields = CompactFields; int allocation_style = FieldsAllocationStyle;
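Whether a NONSTATIC_FLATTENABLE field is actually flattened comes down to the size test against ValueFieldMaxFlatSize in the pre-resolution loop above. A standalone sketch of that predicate, with hypothetical names:

    // A non-static value field is embedded (flattened) only if its instance size,
    // in bytes, fits under the configured limit; a negative limit disables the cap.
    static bool should_flatten(long value_size_in_bytes, long max_flat_size) {
      return max_flat_size < 0 || value_size_in_bytes <= max_flat_size;
    }

Fields that fail the test are counted in not_flattened_value_types and laid out as ordinary oops, which is why nonstatic_oop_count is bumped afterwards.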
*** 3989,4005 **** } else if( allocation_style == 1 ) { // Fields order: longs/doubles, ints, shorts/chars, bytes, oops, padded fields next_nonstatic_double_offset = next_nonstatic_field_offset; } else if( allocation_style == 2 ) { // Fields allocation: oops fields in super and sub classes are together. ! if( nonstatic_field_size > 0 && _super_klass != NULL && ! _super_klass->nonstatic_oop_map_size() > 0 ) { ! const unsigned int map_count = _super_klass->nonstatic_oop_map_count(); ! const OopMapBlock* const first_map = _super_klass->start_of_nonstatic_oop_maps(); ! const OopMapBlock* const last_map = first_map + map_count - 1; ! const int next_offset = last_map->offset() + (last_map->count() * heapOopSize); ! if (next_offset == next_nonstatic_field_offset) { allocation_style = 0; // allocate oops first next_nonstatic_oop_offset = next_nonstatic_field_offset; next_nonstatic_double_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize); } --- 4274,4285 ---- } else if( allocation_style == 1 ) { // Fields order: longs/doubles, ints, shorts/chars, bytes, oops, padded fields next_nonstatic_double_offset = next_nonstatic_field_offset; } else if( allocation_style == 2 ) { // Fields allocation: oops fields in super and sub classes are together. ! if( nonstatic_field_size > 0 && super_oop_map_count > 0 ) { ! if (next_nonstatic_field_offset == nonstatic_oop_maps->last_oop_map()->end_offset()) { allocation_style = 0; // allocate oops first next_nonstatic_oop_offset = next_nonstatic_field_offset; next_nonstatic_double_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize); }
*** 4078,4087 **** --- 4358,4377 ---- next_nonstatic_oop_offset = align_up(next_nonstatic_oop_offset, heapOopSize); } next_nonstatic_padded_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize); } + // Aligning embedded value types + // bug below, the current algorithm to layout embedded value types always put them at the + // end of the layout, which doesn't match the different allocation policies the VM is + // supposed to provide => FixMe + // Note also that the current alignment policy is to make each value type starting on a + // 64 bits boundary. This could be optimized later. For instance, it could be nice to + // align value types according to their most constrained internal type. + next_nonstatic_valuetype_offset = align_up(next_nonstatic_padded_offset, BytesPerLong); + int next_value_type_index = 0; + // Iterate over fields again and compute correct offsets. // The field allocation type was temporarily stored in the offset slot. // oop fields are located before non-oop fields (static and non-static). for (AllFieldStream fs(_fields, cp); !fs.done(); fs.next()) {
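The embedded value-type area above is carved out by repeatedly rounding the running offset up to a 64-bit boundary. For reference, a minimal sketch of the power-of-two round-up that align_up performs:

    #include <cassert>

    // Round 'offset' up to the next multiple of 'alignment' (a power of two).
    static int align_up_sketch(int offset, int alignment) {
      assert((alignment & (alignment - 1)) == 0 && "alignment must be a power of two");
      return (offset + alignment - 1) & ~(alignment - 1);
    }
    // e.g. align_up_sketch(28, 8) == 32, align_up_sketch(32, 8) == 32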
*** 4094,4103 **** --- 4384,4395 ---- int real_offset = 0; const FieldAllocationType atype = (const FieldAllocationType) fs.allocation_type(); // pack the rest of the fields switch (atype) { + // Value types in static fields are handled with oops + case STATIC_FLATTENABLE: // Fallthrough case STATIC_OOP: real_offset = next_static_oop_offset; next_static_oop_offset += heapOopSize; break; case STATIC_BYTE:
*** 4114,4152 **** break; case STATIC_DOUBLE: real_offset = next_static_double_offset; next_static_double_offset += BytesPerLong; break; case NONSTATIC_OOP: if( nonstatic_oop_space_count > 0 ) { real_offset = nonstatic_oop_space_offset; nonstatic_oop_space_offset += heapOopSize; nonstatic_oop_space_count -= 1; } else { real_offset = next_nonstatic_oop_offset; next_nonstatic_oop_offset += heapOopSize; } ! ! // Record this oop in the oop maps ! if( nonstatic_oop_map_count > 0 && ! nonstatic_oop_offsets[nonstatic_oop_map_count - 1] == ! real_offset - ! int(nonstatic_oop_counts[nonstatic_oop_map_count - 1]) * ! heapOopSize ) { ! // This oop is adjacent to the previous one, add to current oop map ! assert(nonstatic_oop_map_count - 1 < max_nonstatic_oop_maps, "range check"); ! nonstatic_oop_counts[nonstatic_oop_map_count - 1] += 1; ! } else { ! // This oop is not adjacent to the previous one, create new oop map ! assert(nonstatic_oop_map_count < max_nonstatic_oop_maps, "range check"); ! nonstatic_oop_offsets[nonstatic_oop_map_count] = real_offset; ! nonstatic_oop_counts [nonstatic_oop_map_count] = 1; ! nonstatic_oop_map_count += 1; ! if( first_nonstatic_oop_offset == 0 ) { // Undefined ! first_nonstatic_oop_offset = real_offset; ! } ! } break; case NONSTATIC_BYTE: if( nonstatic_byte_space_count > 0 ) { real_offset = nonstatic_byte_space_offset; nonstatic_byte_space_offset += 1; --- 4406,4450 ---- break; case STATIC_DOUBLE: real_offset = next_static_double_offset; next_static_double_offset += BytesPerLong; break; + case NONSTATIC_FLATTENABLE: + if (fs.is_flattened()) { + Klass* klass = nonstatic_value_type_klasses[next_value_type_index]; + assert(klass != NULL, "Klass should have been loaded and resolved earlier"); + assert(klass->access_flags().is_value_type(),"Must be a value type"); + ValueKlass* vklass = ValueKlass::cast(klass); + real_offset = next_nonstatic_valuetype_offset; + next_nonstatic_valuetype_offset += (vklass->size_helper()) * wordSize - vklass->first_field_offset(); + // aligning next value type on a 64 bits boundary + next_nonstatic_valuetype_offset = align_up(next_nonstatic_valuetype_offset, BytesPerLong); + next_value_type_index += 1; + + if (vklass->contains_oops()) { // add flatten oop maps + int diff = real_offset - vklass->first_field_offset(); + const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps(); + const OopMapBlock* const last_map = map + vklass->nonstatic_oop_map_count(); + while (map < last_map) { + nonstatic_oop_maps->add(map->offset() + diff, map->count()); + map++; + } + } + break; + } else { + // Fall through + } case NONSTATIC_OOP: if( nonstatic_oop_space_count > 0 ) { real_offset = nonstatic_oop_space_offset; nonstatic_oop_space_offset += heapOopSize; nonstatic_oop_space_count -= 1; } else { real_offset = next_nonstatic_oop_offset; next_nonstatic_oop_offset += heapOopSize; } ! nonstatic_oop_maps->add(real_offset, 1); break; case NONSTATIC_BYTE: if( nonstatic_byte_space_count > 0 ) { real_offset = nonstatic_byte_space_offset; nonstatic_byte_space_offset += 1;
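The NONSTATIC_FLATTENABLE case above does two pieces of arithmetic: the embedded payload is the value class's instance size minus its header (first_field_offset), and each of the value class's oop maps is re-based by the distance between the new field offset and that payload start. A small sketch of both, with hypothetical names:

    // Payload size of an embedded value: instance size in bytes minus the header
    // that is dropped when the value is inlined into its holder.
    static int embedded_payload_bytes(int instance_bytes, int first_field_offset) {
      return instance_bytes - first_field_offset;
    }

    // Re-base an oop offset recorded inside the value class so it points into the
    // holder: shift by (holder_field_offset - first_field_offset).
    static int rebase_oop_offset(int oop_offset_in_value,
                                 int holder_field_offset,
                                 int first_field_offset) {
      return oop_offset_in_value + (holder_field_offset - first_field_offset);
    }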
*** 4251,4284 **** next_nonstatic_padded_offset = align_up(next_nonstatic_padded_offset, BytesPerLong); real_offset = next_nonstatic_padded_offset; next_nonstatic_padded_offset += BytesPerLong; break; case NONSTATIC_OOP: next_nonstatic_padded_offset = align_up(next_nonstatic_padded_offset, heapOopSize); real_offset = next_nonstatic_padded_offset; next_nonstatic_padded_offset += heapOopSize; ! ! // Record this oop in the oop maps ! if( nonstatic_oop_map_count > 0 && ! nonstatic_oop_offsets[nonstatic_oop_map_count - 1] == ! real_offset - ! int(nonstatic_oop_counts[nonstatic_oop_map_count - 1]) * ! heapOopSize ) { ! // This oop is adjacent to the previous one, add to current oop map ! assert(nonstatic_oop_map_count - 1 < max_nonstatic_oop_maps, "range check"); ! nonstatic_oop_counts[nonstatic_oop_map_count - 1] += 1; ! } else { ! // This oop is not adjacent to the previous one, create new oop map ! assert(nonstatic_oop_map_count < max_nonstatic_oop_maps, "range check"); ! nonstatic_oop_offsets[nonstatic_oop_map_count] = real_offset; ! nonstatic_oop_counts [nonstatic_oop_map_count] = 1; ! nonstatic_oop_map_count += 1; ! if( first_nonstatic_oop_offset == 0 ) { // Undefined ! first_nonstatic_oop_offset = real_offset; ! } ! } break; default: ShouldNotReachHere(); } --- 4549,4569 ---- next_nonstatic_padded_offset = align_up(next_nonstatic_padded_offset, BytesPerLong); real_offset = next_nonstatic_padded_offset; next_nonstatic_padded_offset += BytesPerLong; break; + // Value types in static fields are handled with oops + case NONSTATIC_FLATTENABLE: + throwValueTypeLimitation(THREAD_AND_LOCATION, + "@Contended annotation not supported for value types yet", fs.name(), fs.signature()); + return; + case NONSTATIC_OOP: next_nonstatic_padded_offset = align_up(next_nonstatic_padded_offset, heapOopSize); real_offset = next_nonstatic_padded_offset; next_nonstatic_padded_offset += heapOopSize; ! nonstatic_oop_maps->add(real_offset, 1); break; default: ShouldNotReachHere(); }
*** 4309,4324 **** // Entire class is contended, pad in the back. // This helps to alleviate memory contention effects for subclass fields // and/or adjacent object. if (is_contended_class) { next_nonstatic_padded_offset += ContendedPaddingWidth; } ! int notaligned_nonstatic_fields_end = next_nonstatic_padded_offset; ! int nonstatic_fields_end = align_up(notaligned_nonstatic_fields_end, heapOopSize); int instance_end = align_up(notaligned_nonstatic_fields_end, wordSize); int static_fields_end = align_up(next_static_byte_offset, wordSize); int static_field_size = (static_fields_end - InstanceMirrorKlass::offset_of_static_fields()) / wordSize; --- 4594,4621 ---- // Entire class is contended, pad in the back. // This helps to alleviate memory contention effects for subclass fields // and/or adjacent object. if (is_contended_class) { + assert(!is_value_type(), "@Contended not supported for value types yet"); next_nonstatic_padded_offset += ContendedPaddingWidth; } ! int notaligned_nonstatic_fields_end; ! if (nonstatic_value_type_count != 0) { ! notaligned_nonstatic_fields_end = next_nonstatic_valuetype_offset; ! } else { ! notaligned_nonstatic_fields_end = next_nonstatic_padded_offset; ! } ! int nonstatic_field_sz_align = heapOopSize; ! if (is_value_type()) { ! if ((notaligned_nonstatic_fields_end - nonstatic_fields_start) > heapOopSize) { ! nonstatic_field_sz_align = BytesPerLong; // value copy of fields only uses jlong copy ! } ! } ! int nonstatic_fields_end = align_up(notaligned_nonstatic_fields_end, nonstatic_field_sz_align); int instance_end = align_up(notaligned_nonstatic_fields_end, wordSize); int static_fields_end = align_up(next_static_byte_offset, wordSize); int static_field_size = (static_fields_end - InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
*** 4326,4417 **** (nonstatic_fields_end - nonstatic_fields_start) / heapOopSize; int instance_size = align_object_size(instance_end / wordSize); assert(instance_size == align_object_size(align_up( ! (instanceOopDesc::base_offset_in_bytes() + nonstatic_field_size*heapOopSize), ! wordSize) / wordSize), "consistent layout helper value"); // Invariant: nonstatic_field end/start should only change if there are // nonstatic fields in the class, or if the class is contended. We compare // against the non-aligned value, so that end alignment will not fail the // assert without actually having the fields. assert((notaligned_nonstatic_fields_end == nonstatic_fields_start) || is_contended_class || (nonstatic_fields_count > 0), "double-check nonstatic start/end"); // Number of non-static oop map blocks allocated at end of klass. ! const unsigned int total_oop_map_count = ! compute_oop_map_count(_super_klass, nonstatic_oop_map_count, ! first_nonstatic_oop_offset); #ifndef PRODUCT ! if (PrintFieldLayout) { print_field_layout(_class_name, _fields, cp, instance_size, nonstatic_fields_start, nonstatic_fields_end, static_fields_end); } #endif // Pass back information needed for InstanceKlass creation ! info->nonstatic_oop_offsets = nonstatic_oop_offsets; ! info->nonstatic_oop_counts = nonstatic_oop_counts; ! info->nonstatic_oop_map_count = nonstatic_oop_map_count; ! info->total_oop_map_count = total_oop_map_count; info->instance_size = instance_size; info->static_field_size = static_field_size; info->nonstatic_field_size = nonstatic_field_size; info->has_nonstatic_fields = has_nonstatic_fields; } ! static void fill_oop_maps(const InstanceKlass* k, ! unsigned int nonstatic_oop_map_count, ! const int* nonstatic_oop_offsets, ! const unsigned int* nonstatic_oop_counts) { ! ! assert(k != NULL, "invariant"); ! ! OopMapBlock* this_oop_map = k->start_of_nonstatic_oop_maps(); ! const InstanceKlass* const super = k->superklass(); ! const unsigned int super_count = super ? super->nonstatic_oop_map_count() : 0; ! if (super_count > 0) { ! // Copy maps from superklass ! OopMapBlock* super_oop_map = super->start_of_nonstatic_oop_maps(); ! for (unsigned int i = 0; i < super_count; ++i) { ! *this_oop_map++ = *super_oop_map++; ! } ! } ! ! if (nonstatic_oop_map_count > 0) { ! if (super_count + nonstatic_oop_map_count > k->nonstatic_oop_map_count()) { ! // The counts differ because there is no gap between superklass's last oop ! // field and the first local oop field. Extend the last oop map copied ! // from the superklass instead of creating new one. ! nonstatic_oop_map_count--; ! nonstatic_oop_offsets++; ! this_oop_map--; ! this_oop_map->set_count(this_oop_map->count() + *nonstatic_oop_counts++); ! this_oop_map++; ! } ! ! // Add new map blocks, fill them ! while (nonstatic_oop_map_count-- > 0) { ! this_oop_map->set_offset(*nonstatic_oop_offsets++); ! this_oop_map->set_count(*nonstatic_oop_counts++); ! this_oop_map++; ! } ! assert(k->start_of_nonstatic_oop_maps() + k->nonstatic_oop_map_count() == ! this_oop_map, "sanity"); ! } ! } ! ! ! void ClassFileParser::set_precomputed_flags(InstanceKlass* ik) { assert(ik != NULL, "invariant"); const Klass* const super = ik->super(); // Check if this klass has an empty finalize method (i.e. one with return bytecode only), --- 4623,4671 ---- (nonstatic_fields_end - nonstatic_fields_start) / heapOopSize; int instance_size = align_object_size(instance_end / wordSize); assert(instance_size == align_object_size(align_up( ! 
(instanceOopDesc::base_offset_in_bytes() + nonstatic_field_size*heapOopSize) ! + initial_value_type_padding, wordSize) / wordSize), "consistent layout helper value"); ! // Invariant: nonstatic_field end/start should only change if there are // nonstatic fields in the class, or if the class is contended. We compare // against the non-aligned value, so that end alignment will not fail the // assert without actually having the fields. assert((notaligned_nonstatic_fields_end == nonstatic_fields_start) || is_contended_class || (nonstatic_fields_count > 0), "double-check nonstatic start/end"); // Number of non-static oop map blocks allocated at end of klass. ! nonstatic_oop_maps->compact(THREAD); #ifndef PRODUCT ! if ((PrintFieldLayout && !is_value_type()) || ! (PrintValueLayout && (is_value_type() || has_nonstatic_value_fields))) { print_field_layout(_class_name, _fields, cp, instance_size, nonstatic_fields_start, nonstatic_fields_end, static_fields_end); + nonstatic_oop_maps->print_on(tty); + tty->print("\n"); } #endif // Pass back information needed for InstanceKlass creation ! info->oop_map_blocks = nonstatic_oop_maps; info->instance_size = instance_size; info->static_field_size = static_field_size; info->nonstatic_field_size = nonstatic_field_size; info->has_nonstatic_fields = has_nonstatic_fields; } ! void ClassFileParser::set_precomputed_flags(InstanceKlass* ik, TRAPS) { assert(ik != NULL, "invariant"); const Klass* const super = ik->super(); // Check if this klass has an empty finalize method (i.e. one with return bytecode only),
*** 4440,4449 **** --- 4694,4707 ---- #endif // Check if this klass supports the java.lang.Cloneable interface if (SystemDictionary::Cloneable_klass_loaded()) { if (ik->is_subtype_of(SystemDictionary::Cloneable_klass())) { + if (ik->is_value()) { + throwValueTypeLimitation(THREAD_AND_LOCATION, "Value Types do not support Cloneable"); + return; + } ik->set_is_cloneable(); } } // Check if this klass has a vanilla default constructor
*** 4480,4489 **** --- 4738,4752 ---- const jint lh = Klass::instance_layout_helper(ik->size_helper(), true); ik->set_layout_helper(lh); } } + bool ClassFileParser::supports_value_types() const { + // Value types are only supported by class file version 55 and later + return _major_version >= JAVA_11_VERSION; + } + // utility methods for appending an array with check for duplicates static void append_interfaces(GrowableArray<InstanceKlass*>* result, const Array<InstanceKlass*>* const ifs) { // iterate over new interfaces
*** 4742,4762 **** --- 5005,5037 ---- // utility methods for format checking void ClassFileParser::verify_legal_class_modifiers(jint flags, TRAPS) const { const bool is_module = (flags & JVM_ACC_MODULE) != 0; + const bool is_value_type = (flags & JVM_ACC_VALUE) != 0; assert(_major_version >= JAVA_9_VERSION || !is_module, "JVM_ACC_MODULE should not be set"); + assert(supports_value_types() || !is_value_type, "JVM_ACC_VALUE should not be set"); if (is_module) { ResourceMark rm(THREAD); Exceptions::fthrow( THREAD_AND_LOCATION, vmSymbols::java_lang_NoClassDefFoundError(), "%s is not a class because access_flag ACC_MODULE is set", _class_name->as_C_string()); return; } + if (is_value_type && !EnableValhalla) { + ResourceMark rm(THREAD); + Exceptions::fthrow( + THREAD_AND_LOCATION, + vmSymbols::java_lang_ClassFormatError(), + "Class modifier ACC_VALUE in class %s requires option -XX:+EnableValhalla", + _class_name->as_C_string() + ); + } + if (!_need_verify) { return; } const bool is_interface = (flags & JVM_ACC_INTERFACE) != 0; const bool is_abstract = (flags & JVM_ACC_ABSTRACT) != 0; const bool is_final = (flags & JVM_ACC_FINAL) != 0;
*** 4766,4776 **** const bool major_gte_15 = _major_version >= JAVA_1_5_VERSION; if ((is_abstract && is_final) || (is_interface && !is_abstract) || (is_interface && major_gte_15 && (is_super || is_enum)) || ! (!is_interface && major_gte_15 && is_annotation)) { ResourceMark rm(THREAD); Exceptions::fthrow( THREAD_AND_LOCATION, vmSymbols::java_lang_ClassFormatError(), "Illegal class modifiers in class %s: 0x%X", --- 5041,5052 ---- const bool major_gte_15 = _major_version >= JAVA_1_5_VERSION; if ((is_abstract && is_final) || (is_interface && !is_abstract) || (is_interface && major_gte_15 && (is_super || is_enum)) || ! (!is_interface && major_gte_15 && is_annotation) || ! (is_value_type && (is_interface || is_abstract || is_enum || !is_final))) { ResourceMark rm(THREAD); Exceptions::fthrow( THREAD_AND_LOCATION, vmSymbols::java_lang_ClassFormatError(), "Illegal class modifiers in class %s: 0x%X",
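The new clause in the modifier check is a plain boolean constraint: ACC_VALUE excludes interface, abstract, and enum, and requires final. Isolated as a predicate (names are illustrative):

    // True when the ACC_VALUE combination above must be rejected.
    static bool illegal_value_class_modifiers(bool is_value_type, bool is_interface,
                                              bool is_abstract, bool is_enum,
                                              bool is_final) {
      return is_value_type && (is_interface || is_abstract || is_enum || !is_final);
    }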
*** 4849,4858 **** --- 5125,5135 ---- } } void ClassFileParser::verify_legal_field_modifiers(jint flags, bool is_interface, + bool is_value_type, TRAPS) const { if (!_need_verify) { return; } const bool is_public = (flags & JVM_ACC_PUBLIC) != 0; const bool is_protected = (flags & JVM_ACC_PROTECTED) != 0;
*** 4873,4882 **** --- 5150,5163 ---- is_illegal = true; } } else { // not interface if (has_illegal_visibility(flags) || (is_final && is_volatile)) { is_illegal = true; + } else { + if (is_value_type && !is_static && !is_final) { + is_illegal = true; + } } } if (is_illegal) { ResourceMark rm(THREAD);
*** 4889,4898 **** --- 5170,5180 ---- } } void ClassFileParser::verify_legal_method_modifiers(jint flags, bool is_interface, + bool is_value_type, const Symbol* name, TRAPS) const { if (!_need_verify) { return; } const bool is_public = (flags & JVM_ACC_PUBLIC) != 0;
*** 4948,4966 **** --- 5230,5252 ---- if (is_static || is_final || is_synchronized || is_native || is_abstract || (major_gte_15 && is_bridge)) { is_illegal = true; } } else { // not initializer + if (is_value_type && is_synchronized && !is_static) { + is_illegal = true; + } else { if (is_abstract) { if ((is_final || is_native || is_private || is_static || (major_gte_15 && (is_synchronized || is_strict)))) { is_illegal = true; } } } } } + } if (is_illegal) { ResourceMark rm(THREAD); Exceptions::fthrow( THREAD_AND_LOCATION,
*** 5120,5141 **** case JVM_SIGNATURE_INT: case JVM_SIGNATURE_FLOAT: case JVM_SIGNATURE_LONG: case JVM_SIGNATURE_DOUBLE: return signature + 1; ! case JVM_SIGNATURE_CLASS: { if (_major_version < JAVA_1_5_VERSION) { // Skip over the class name if one is there const char* const p = skip_over_field_name(signature + 1, true, --length); // The next character better be a semicolon if (p && (p - signature) > 1 && p[0] == ';') { return p + 1; } } else { ! // Skip leading 'L' and ignore first appearance of ';' signature++; const char* c = (const char*) memchr(signature, ';', length - 1); // Format check signature if (c != NULL) { int newlen = c - (char*) signature; --- 5406,5436 ---- case JVM_SIGNATURE_INT: case JVM_SIGNATURE_FLOAT: case JVM_SIGNATURE_LONG: case JVM_SIGNATURE_DOUBLE: return signature + 1; ! case JVM_SIGNATURE_VALUETYPE: ! // Can't enable this check until JDK upgrades the bytecode generators ! // if (_major_version < CONSTANT_CLASS_DESCRIPTORS ) { ! // classfile_parse_error("Class name contains illegal Q-signature " ! // "in descriptor in class file %s", ! // CHECK_0); ! // } ! // fall through ! case JVM_SIGNATURE_CLASS: ! { if (_major_version < JAVA_1_5_VERSION) { // Skip over the class name if one is there const char* const p = skip_over_field_name(signature + 1, true, --length); // The next character better be a semicolon if (p && (p - signature) > 1 && p[0] == ';') { return p + 1; } } else { ! // Skip leading 'L' or 'Q' and ignore first appearance of ';' signature++; const char* c = (const char*) memchr(signature, ';', length - 1); // Format check signature if (c != NULL) { int newlen = c - (char*) signature;
*** 5186,5195 ****
--- 5481,5493 ----
    } else if (_major_version < JAVA_1_5_VERSION) {
      if (bytes[0] != '<') {
        p = skip_over_field_name(bytes, true, length);
        legal = (p != NULL) && ((p - bytes) == (int)length);
      }
+   } else if (_major_version >= CONSTANT_CLASS_DESCRIPTORS && bytes[length - 1] == ';' ) {
+     // Support for L...; and Q...; descriptors
+     legal = verify_unqualified_name(bytes + 1, length - 2, LegalClass);
    } else {
      // 4900761: relax the constraints based on JSR202 spec
      // Class names may be drawn from the entire Unicode character set.
      // Identifiers between '/' must be unqualified names.
      // The utf8 string has been verified when parsing cpool entries.
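When the class file version supports CONSTANT_CLASS_DESCRIPTORS, the name check above strips the L...;/Q...; envelope and validates only the characters in between via verify_unqualified_name(..., LegalClass). The checker below is a simplified standalone stand-in for that validation, enforcing only the core rule (no '.', ';' or '[', and no empty '/'-separated segments); it is illustrative, not the HotSpot implementation.

#include <cstddef>
#include <cstdio>

static bool legal_class_name_body(const char* name, std::size_t length) {
  if (length == 0) return false;
  std::size_t segment_len = 0;
  for (std::size_t i = 0; i < length; i++) {
    const char c = name[i];
    if (c == '.' || c == ';' || c == '[') return false;  // never legal inside a class name
    if (c == '/') {
      if (segment_len == 0) return false;                // empty (or leading) package segment
      segment_len = 0;
    } else {
      segment_len++;
    }
  }
  return segment_len > 0;                                // must not end with '/'
}

int main() {
  const char* desc = "Qjava/lang/Point;";                // hypothetical value class descriptor
  std::size_t len = 17;                                  // strlen(desc)
  std::printf("descriptor body legal? %d\n", legal_class_name_body(desc + 1, len - 2));
  return 0;
}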
*** 5360,5370 ****
    return _field_info->static_field_size;
  }

  int ClassFileParser::total_oop_map_count() const {
    assert(_field_info != NULL, "invariant");
!   return _field_info->total_oop_map_count;
  }

  jint ClassFileParser::layout_size() const {
    assert(_field_info != NULL, "invariant");
    return _field_info->instance_size;
--- 5658,5668 ----
    return _field_info->static_field_size;
  }

  int ClassFileParser::total_oop_map_count() const {
    assert(_field_info != NULL, "invariant");
!   return _field_info->oop_map_blocks->nonstatic_oop_map_count;
  }

  jint ClassFileParser::layout_size() const {
    assert(_field_info != NULL, "invariant");
    return _field_info->instance_size;
*** 5490,5499 ****
--- 5788,5803 ----
        log_info(class, fingerprint)("%s : expected = " PTR64_FORMAT " actual = " PTR64_FORMAT,
                                     ik->external_name(), aot_fp, _stream->compute_fingerprint());
      }
    }

+   if (ik->is_value()) {
+     ValueKlass* vk = ValueKlass::cast(ik);
+     oop val = ik->allocate_instance(CHECK_NULL);
+     vk->set_default_value(val);
+   }
+ 
    return ik;
  }

  void ClassFileParser::fill_instance_klass(InstanceKlass* ik, bool changed_by_loadhook, TRAPS) {
    assert(ik != NULL, "invariant");
*** 5510,5520 ****
    set_klass_to_deallocate(ik);

    assert(_field_info != NULL, "invariant");
    assert(ik->static_field_size() == _field_info->static_field_size, "sanity");
!   assert(ik->nonstatic_oop_map_count() == _field_info->total_oop_map_count,
           "sanity");

    assert(ik->is_instance_klass(), "sanity");
    assert(ik->size_helper() == _field_info->instance_size, "sanity");
--- 5814,5824 ----
    set_klass_to_deallocate(ik);

    assert(_field_info != NULL, "invariant");
    assert(ik->static_field_size() == _field_info->static_field_size, "sanity");
!   assert(ik->nonstatic_oop_map_count() == _field_info->oop_map_blocks->nonstatic_oop_map_count,
           "sanity");

    assert(ik->is_instance_klass(), "sanity");
    assert(ik->size_helper() == _field_info->instance_size, "sanity");
*** 5523,5533 ****
    // Not yet: supers are done below to support the new subtype-checking fields
    ik->set_nonstatic_field_size(_field_info->nonstatic_field_size);
    ik->set_has_nonstatic_fields(_field_info->has_nonstatic_fields);
    assert(_fac != NULL, "invariant");
!   ik->set_static_oop_field_count(_fac->count[STATIC_OOP]);

    // this transfers ownership of a lot of arrays from
    // the parser onto the InstanceKlass*
    apply_parsed_class_metadata(ik, _java_fields_count, CHECK);
--- 5827,5837 ----
    // Not yet: supers are done below to support the new subtype-checking fields
    ik->set_nonstatic_field_size(_field_info->nonstatic_field_size);
    ik->set_has_nonstatic_fields(_field_info->has_nonstatic_fields);
    assert(_fac != NULL, "invariant");
!   ik->set_static_oop_field_count(_fac->count[STATIC_OOP] + _fac->count[STATIC_FLATTENABLE]);

    // this transfers ownership of a lot of arrays from
    // the parser onto the InstanceKlass*
    apply_parsed_class_metadata(ik, _java_fields_count, CHECK);
*** 5611,5627 ****
    // Initialize itable offset tables
    klassItable::setup_itable_offset_table(ik);

    // Compute transitive closure of interfaces this class implements
    // Do final class setup
!   fill_oop_maps(ik,
!                 _field_info->nonstatic_oop_map_count,
!                 _field_info->nonstatic_oop_offsets,
!                 _field_info->nonstatic_oop_counts);

    // Fill in has_finalizer, has_vanilla_constructor, and layout_helper
!   set_precomputed_flags(ik);

    // check if this class can access its super class
    check_super_class_access(ik, CHECK);

    // check if this class can access its superinterfaces
--- 5915,5931 ----
    // Initialize itable offset tables
    klassItable::setup_itable_offset_table(ik);

    // Compute transitive closure of interfaces this class implements
    // Do final class setup
!   OopMapBlocksBuilder* oop_map_blocks = _field_info->oop_map_blocks;
!   if (oop_map_blocks->nonstatic_oop_map_count > 0) {
!     oop_map_blocks->copy(ik->start_of_nonstatic_oop_maps());
!   }

    // Fill in has_finalizer, has_vanilla_constructor, and layout_helper
!   set_precomputed_flags(ik, CHECK);

    // check if this class can access its super class
    check_super_class_access(ik, CHECK);

    // check if this class can access its superinterfaces
*** 5667,5676 ****
--- 5971,6003 ----
        // We won a potential race
        JvmtiExport::add_default_read_edges(module_handle, THREAD);
      }
    }

+   int nfields = ik->java_fields_count();
+   if (ik->is_value()) nfields++;
+   for (int i = 0; i < nfields; i++) {
+     if (ik->field_access_flags(i) & JVM_ACC_FLATTENABLE) {
+       Symbol* klass_name = ik->field_signature(i)->fundamental_name(CHECK);
+       // Value classes must have been pre-loaded
+       Klass* klass = SystemDictionary::find(klass_name,
+                                             Handle(THREAD, ik->class_loader()),
+                                             Handle(THREAD, ik->protection_domain()), CHECK);
+       assert(klass != NULL, "Sanity check");
+       assert(klass->access_flags().is_value_type(), "Value type expected");
+       ik->set_value_field_klass(i, klass);
+       klass_name->decrement_refcount();
+     } else if (is_value_type() && ((ik->field_access_flags(i) & JVM_ACC_FIELD_INTERNAL) != 0)
+                && ((ik->field_access_flags(i) & JVM_ACC_STATIC) != 0)) {
+       ValueKlass::cast(ik)->set_default_value_offset(ik->field_offset(i));
+     }
+   }
+ 
+   if (is_value_type()) {
+     ValueKlass::cast(ik)->initialize_calling_convention(CHECK);
+   }
+ 
    ClassLoadingService::notify_class_loaded(ik, false /* not shared class */);

    if (!is_internal()) {
      if (log_is_enabled(Info, class, load)) {
        ResourceMark rm;
*** 5856,5865 ****
--- 6183,6193 ----
    _need_verify(false),
    _relax_verify(false),
    _has_nonstatic_concrete_methods(false),
    _declares_nonstatic_concrete_methods(false),
    _has_final_method(false),
+   _has_flattenable_fields(false),
    _has_finalizer(false),
    _has_empty_finalizer(false),
    _has_vanilla_constructor(false),
    _max_bootstrap_specifier_index(-1) {
*** 6051,6069 ****
    assert(cp_size == (const u2)cp->length(), "invariant");

    // ACCESS FLAGS
    stream->guarantee_more(8, CHECK);  // flags, this_class, super_class, infs_len

!   // Access flags
!   jint flags;
    // JVM_ACC_MODULE is defined in JDK-9 and later.
    if (_major_version >= JAVA_9_VERSION) {
!     flags = stream->get_u2_fast() & (JVM_RECOGNIZED_CLASS_MODIFIERS | JVM_ACC_MODULE);
!   } else {
!     flags = stream->get_u2_fast() & JVM_RECOGNIZED_CLASS_MODIFIERS;
    }

    if ((flags & JVM_ACC_INTERFACE) && _major_version < JAVA_6_VERSION) {
      // Set abstract bit for old class files for backward compatibility
      flags |= JVM_ACC_ABSTRACT;
    }
--- 6379,6401 ----
    assert(cp_size == (const u2)cp->length(), "invariant");

    // ACCESS FLAGS
    stream->guarantee_more(8, CHECK);  // flags, this_class, super_class, infs_len

!   jint recognized_modifiers = JVM_RECOGNIZED_CLASS_MODIFIERS;
    // JVM_ACC_MODULE is defined in JDK-9 and later.
    if (_major_version >= JAVA_9_VERSION) {
!     recognized_modifiers |= JVM_ACC_MODULE;
!   }
!   // JVM_ACC_VALUE is defined for class file version 55 and later
!   if (supports_value_types()) {
!     recognized_modifiers |= JVM_ACC_VALUE;
    }

+   // Access flags
+   jint flags = stream->get_u2_fast() & recognized_modifiers;
+ 
    if ((flags & JVM_ACC_INTERFACE) && _major_version < JAVA_6_VERSION) {
      // Set abstract bit for old class files for backward compatibility
      flags |= JVM_ACC_ABSTRACT;
    }
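The rewritten flag handling builds a recognized-modifier mask first and then applies it to the raw access_flags, so unrecognized bits are silently dropped and the recognized set can grow with the class-file version or a feature gate such as supports_value_types(). A standalone sketch of that masking pattern follows; the constants are illustrative and the ACC_VALUE encoding is an assumption.

#include <cstdint>
#include <cstdio>

namespace sketch {
  const uint32_t RECOGNIZED_CLASS_MODIFIERS = 0x7631;  // illustrative stand-in for JVM_RECOGNIZED_CLASS_MODIFIERS
  const uint32_t ACC_MODULE = 0x8000;
  const uint32_t ACC_VALUE  = 0x0100;                  // assumed prototype encoding
  const int      JAVA_9_VERSION = 53;

  uint32_t recognized_class_flags(uint32_t raw_flags, int major_version, bool supports_value_types) {
    uint32_t recognized = RECOGNIZED_CLASS_MODIFIERS;
    if (major_version >= JAVA_9_VERSION) {
      recognized |= ACC_MODULE;
    }
    if (supports_value_types) {
      recognized |= ACC_VALUE;
    }
    return raw_flags & recognized;   // unrecognized bits are dropped, not rejected
  }
}

int main() {
  // A pre-value-types class file carrying the ACC_VALUE bit: the bit is masked away.
  std::printf("0x%x\n", (unsigned) sketch::recognized_class_flags(0x0131u, 52, false));
  return 0;
}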
*** 6194,6203 ****
--- 6526,6536 ----

    // Fields (offsets are filled in later)
    _fac = new FieldAllocationCount();
    parse_fields(stream,
                 _access_flags.is_interface(),
+                _access_flags.is_value_type(),
                 _fac,
                 cp,
                 cp_size,
                 &_java_fields_count,
                 CHECK);
*** 6206,6215 ****
--- 6539,6549 ----

    // Methods
    AccessFlags promoted_flags;
    parse_methods(stream,
                  _access_flags.is_interface(),
+                 _access_flags.is_value_type(),
                  &promoted_flags,
                  &_has_final_method,
                  &_declares_nonstatic_concrete_methods,
                  CHECK);
*** 6287,6296 ****
--- 6621,6638 ----
          _class_name->as_klass_external_name(),
          _super_klass->external_name()
        );
        return;
      }
+ 
+     // For a value class, only java/lang/Object is an acceptable super class
+     if (_access_flags.get_flags() & JVM_ACC_VALUE) {
+       guarantee_property(_super_klass->name() == vmSymbols::java_lang_Object(),
+                          "Value type must have java.lang.Object as superclass in class file %s",
+                          CHECK);
+     }
+ 
      // Make sure super class is not final
      if (_super_klass->is_final()) {
        THROW_MSG(vmSymbols::java_lang_VerifyError(), "Cannot inherit from final class");
      }
  }
*** 6327,6336 ****
--- 6669,6691 ----
      klassItable::compute_itable_size(_transitive_interfaces);

    assert(_fac != NULL, "invariant");
    assert(_parsed_annotations != NULL, "invariant");
+ 
+   for (AllFieldStream fs(_fields, cp); !fs.done(); fs.next()) {
+     if (fs.is_flattenable()) {
+       // Pre-load value class
+       Klass* klass = SystemDictionary::resolve_flattenable_field_or_fail(&fs,
+           Handle(THREAD, _loader_data->class_loader()),
+           _protection_domain, true, CHECK);
+       assert(klass != NULL, "Sanity check");
+       assert(klass->access_flags().is_value_type(), "Value type expected");
+       _has_flattenable_fields = true;
+     }
+   }
+ 
    _field_info = new FieldLayoutInfo();
    layout_fields(cp, _fac, _parsed_annotations, _field_info, CHECK);

    // Compute reference typ
    _rt = (NULL ==_super_klass) ? REF_NONE : _super_klass->reference_type();
*** 6364,6373 ****
--- 6719,6729 ----

  const ClassFileStream* ClassFileParser::clone_stream() const {
    assert(_stream != NULL, "invariant");

    return _stream->clone();
  }
+ 
  // ----------------------------------------------------------------------------
  // debugging

  #ifdef ASSERT