src/share/vm/classfile/classFileParser.cpp

*** 3959,3968 ****
--- 3959,3969 ----
    unsigned int nonstatic_byte_count = fac->count[NONSTATIC_BYTE] - fac_contended.count[NONSTATIC_BYTE];
    unsigned int nonstatic_oop_count = fac->count[NONSTATIC_OOP] - fac_contended.count[NONSTATIC_OOP];

    int static_value_type_count = 0;
    int nonstatic_value_type_count = 0;
+   int nonstatic_nonflattened_value_types_count = 0;
    int* nonstatic_value_type_indexes = NULL;
    Klass** nonstatic_value_type_klasses = NULL;
    unsigned int value_type_oop_map_count = 0;
    int max_nonstatic_value_type = fac->count[NONSTATIC_VALUETYPE] + 1;
*** 3983,4000 ****
--- 3984,4011 ----
          Klass* klass = SystemDictionary::resolve_or_fail(signature,
                                                           Handle(THREAD, _loader_data->class_loader()),
                                                           _protection_domain, true, CHECK);
          assert(klass != NULL, "Sanity check");
          assert(klass->access_flags().is_value_type(), "Value type expected");
+         ValueKlass* vk = ValueKlass::cast(klass);
+         // Conditions to apply flattening or not should be defined
+         // in a single place
+         if (vk->size_helper() <= ValueArrayElemMaxFlatSize) {
            nonstatic_value_type_indexes[nonstatic_value_type_count] = fs.index();
            nonstatic_value_type_klasses[nonstatic_value_type_count] = klass;
            nonstatic_value_type_count++;
            ValueKlass* vklass = ValueKlass::cast(klass);
            if (vklass->contains_oops()) {
              value_type_oop_map_count += vklass->nonstatic_oop_map_count();
            }
+           fs.set_flattening(true);
+         } else {
+           nonstatic_nonflattened_value_types_count++;
+           value_type_oop_map_count++;
+           fs.set_flattening(false);
+         }
        }
      }

    // Total non-static fields count, including every contended field
    unsigned int nonstatic_fields_count = fac->count[NONSTATIC_DOUBLE] + fac->count[NONSTATIC_WORD] +
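Review note (not part of the webrev): a minimal, self-contained sketch of the flattening decision this hunk introduces. ValueKlassSketch, its size field, and the threshold value below are illustrative stand-ins; the real check compares ValueKlass::size_helper() against the existing ValueArrayElemMaxFlatSize flag, exactly as shown in the hunk above.

    // Illustrative stand-ins only; the real types live in HotSpot (valueKlass.hpp, classFileParser.cpp).
    #include <cstdio>

    struct ValueKlassSketch {          // stand-in for ValueKlass
      int size_in_words;               // what size_helper() would report
      int size_helper() const { return size_in_words; }
    };

    static const int ValueArrayElemMaxFlatSize = 16;   // assumed threshold; the real value is a VM flag

    // Mirrors the decision added in the hunk: small value types are flattened into
    // the container, larger ones are laid out as an ordinary reference (oop).
    static bool should_flatten(const ValueKlassSketch& vk) {
      return vk.size_helper() <= ValueArrayElemMaxFlatSize;
    }

    int main() {
      ValueKlassSketch small = { 4 };
      ValueKlassSketch large = { 64 };
      std::printf("small: %s\n", should_flatten(small) ? "flattened" : "not flattened (oop)");
      std::printf("large: %s\n", should_flatten(large) ? "flattened" : "not flattened (oop)");
      return 0;
    }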
*** 4073,4094 ****
    // Rearrange fields for a given allocation style
    if( allocation_style == 0 ) {
      // Fields order: oops, longs/doubles, ints, shorts/chars, bytes, padded fields
      next_nonstatic_oop_offset    = next_nonstatic_field_offset;
      next_nonstatic_double_offset = next_nonstatic_oop_offset +
!                                    (nonstatic_oop_count * heapOopSize);
    } else if( allocation_style == 1 ) {
      // Fields order: longs/doubles, ints, shorts/chars, bytes, oops, padded fields
      next_nonstatic_double_offset = next_nonstatic_field_offset;
    } else if( allocation_style == 2 ) {
      // Fields allocation: oops fields in super and sub classes are together.
      if( nonstatic_field_size > 0 && super_oop_map_count > 0 ) {
        if (next_nonstatic_field_offset == nonstatic_oop_maps->last_oop_map()->end_offset()) {
          allocation_style = 0;   // allocate oops first
          next_nonstatic_oop_offset    = next_nonstatic_field_offset;
          next_nonstatic_double_offset = next_nonstatic_oop_offset +
!                                        (nonstatic_oop_count * heapOopSize);
        }
      }
      if( allocation_style == 2 ) {
        allocation_style = 1;     // allocate oops last
        next_nonstatic_double_offset = next_nonstatic_field_offset;
--- 4084,4105 ----
    // Rearrange fields for a given allocation style
    if( allocation_style == 0 ) {
      // Fields order: oops, longs/doubles, ints, shorts/chars, bytes, padded fields
      next_nonstatic_oop_offset    = next_nonstatic_field_offset;
      next_nonstatic_double_offset = next_nonstatic_oop_offset +
!                                    ((nonstatic_oop_count + nonstatic_nonflattened_value_types_count) * heapOopSize);
    } else if( allocation_style == 1 ) {
      // Fields order: longs/doubles, ints, shorts/chars, bytes, oops, padded fields
      next_nonstatic_double_offset = next_nonstatic_field_offset;
    } else if( allocation_style == 2 ) {
      // Fields allocation: oops fields in super and sub classes are together.
      if( nonstatic_field_size > 0 && super_oop_map_count > 0 ) {
        if (next_nonstatic_field_offset == nonstatic_oop_maps->last_oop_map()->end_offset()) {
          allocation_style = 0;   // allocate oops first
          next_nonstatic_oop_offset    = next_nonstatic_field_offset;
          next_nonstatic_double_offset = next_nonstatic_oop_offset +
!                                        ((nonstatic_oop_count + nonstatic_nonflattened_value_types_count) * heapOopSize);
        }
      }
      if( allocation_style == 2 ) {
        allocation_style = 1;     // allocate oops last
        next_nonstatic_double_offset = next_nonstatic_field_offset;
*** 4157,4170 ****
                             nonstatic_byte_count;

    // let oops jump before padding with this allocation style
    if( allocation_style == 1 ) {
      next_nonstatic_oop_offset = next_nonstatic_padded_offset;
!     if( nonstatic_oop_count > 0 ) {
        next_nonstatic_oop_offset = align_up(next_nonstatic_oop_offset, heapOopSize);
      }
!     next_nonstatic_padded_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize);
    }

    // Aligning embedded value types
    // bug below, the current algorithm to layout embedded value types always put them at the
    // end of the layout, which doesn't match the different allocation policies the VM is
--- 4168,4182 ----
                             nonstatic_byte_count;

    // let oops jump before padding with this allocation style
    if( allocation_style == 1 ) {
      next_nonstatic_oop_offset = next_nonstatic_padded_offset;
!     if( (nonstatic_oop_count + nonstatic_nonflattened_value_types_count) > 0 ) {
        next_nonstatic_oop_offset = align_up(next_nonstatic_oop_offset, heapOopSize);
      }
!     next_nonstatic_padded_offset = next_nonstatic_oop_offset
!       + ((nonstatic_oop_count + nonstatic_nonflattened_value_types_count) * heapOopSize);
    }

    // Aligning embedded value types
    // bug below, the current algorithm to layout embedded value types always put them at the
    // end of the layout, which doesn't match the different allocation policies the VM is
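Review note (not part of the webrev): the two hunks above fold the non-flattened value type fields into the reference block, since each of them occupies one heapOopSize slot just like an ordinary oop field. A standalone sketch of that arithmetic; heapOopSize, the counts, and the starting offset are assumed example values, not taken from the VM.

    // Illustrative arithmetic only; values are assumptions for the example.
    #include <cstdio>

    int main() {
      const int heapOopSize = 8;                          // 8 with uncompressed oops, 4 with compressed
      int nonstatic_oop_count = 3;                        // ordinary reference fields
      int nonstatic_nonflattened_value_types_count = 2;   // value type fields laid out as references
      int next_nonstatic_oop_offset = 16;                 // example starting offset of the reference block

      // Non-flattened value types each consume one reference slot, so they are
      // added to the oop count when sizing the reference block, as in the hunks above.
      int next_nonstatic_padded_offset = next_nonstatic_oop_offset
          + (nonstatic_oop_count + nonstatic_nonflattened_value_types_count) * heapOopSize;

      std::printf("reference block occupies offsets %d..%d\n",
                  next_nonstatic_oop_offset, next_nonstatic_padded_offset - 1);
      return 0;
    }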
*** 4212,4222 ****
        case STATIC_DOUBLE:
          real_offset = next_static_double_offset;
          next_static_double_offset += BytesPerLong;
          break;
        case NONSTATIC_VALUETYPE:
!         {
            Klass* klass = nonstatic_value_type_klasses[next_value_type_index];
            assert(klass != NULL, "Klass should have been loaded and resolved earlier");
            assert(klass->access_flags().is_value_type(),"Must be a value type");
            ValueKlass* vklass = ValueKlass::cast(klass);
            real_offset = next_nonstatic_valuetype_offset;
--- 4224,4234 ----
        case STATIC_DOUBLE:
          real_offset = next_static_double_offset;
          next_static_double_offset += BytesPerLong;
          break;
        case NONSTATIC_VALUETYPE:
!         if (fs.is_flatten()) {
            Klass* klass = nonstatic_value_type_klasses[next_value_type_index];
            assert(klass != NULL, "Klass should have been loaded and resolved earlier");
            assert(klass->access_flags().is_value_type(),"Must be a value type");
            ValueKlass* vklass = ValueKlass::cast(klass);
            real_offset = next_nonstatic_valuetype_offset;
*** 4232,4243 ****
            while (map < last_map) {
              nonstatic_oop_maps->add(map->offset() + diff, map->count());
              map++;
            }
          }
-         }
          break;
        case NONSTATIC_OOP:
          if( nonstatic_oop_space_count > 0 ) {
            real_offset = nonstatic_oop_space_offset;
            nonstatic_oop_space_offset += heapOopSize;
            nonstatic_oop_space_count  -= 1;
--- 4244,4257 ----
            while (map < last_map) {
              nonstatic_oop_maps->add(map->offset() + diff, map->count());
              map++;
            }
          }
          break;
+         } else {
+           // Fall through
+         }
        case NONSTATIC_OOP:
          if( nonstatic_oop_space_count > 0 ) {
            real_offset = nonstatic_oop_space_offset;
            nonstatic_oop_space_offset += heapOopSize;
            nonstatic_oop_space_count  -= 1;
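Review note (not part of the webrev): the added else branch makes a NONSTATIC_VALUETYPE field that is not flattened fall through into the NONSTATIC_OOP case, so it receives an ordinary reference slot. A compilable sketch of that control flow; the enum values and the is_flattened flag are stand-ins, not the real FieldAllocationType or field-stream API.

    // Sketch of the fall-through behavior added above, with hypothetical names.
    #include <cstdio>

    enum FieldKind { NONSTATIC_VALUETYPE, NONSTATIC_OOP };

    static const char* layout_for(FieldKind kind, bool is_flattened) {
      switch (kind) {
        case NONSTATIC_VALUETYPE:
          if (is_flattened) {
            return "embedded payload at next_nonstatic_valuetype_offset";
          }
          // not flattened: fall through and lay the field out as a reference
          /* fall through */
        case NONSTATIC_OOP:
          return "reference slot of heapOopSize bytes";
      }
      return "unknown";
    }

    int main() {
      std::printf("%s\n", layout_for(NONSTATIC_VALUETYPE, true));
      std::printf("%s\n", layout_for(NONSTATIC_VALUETYPE, false));
      std::printf("%s\n", layout_for(NONSTATIC_OOP, false));
      return 0;
    }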
*** 4402,4412 ****
      assert(!is_value_type() && !is_value_capable_class(), "@Contended not supported for value types yet");
      next_nonstatic_padded_offset += ContendedPaddingWidth;
    }

    int notaligned_nonstatic_fields_end;
!   if (nonstatic_value_type_count != 0) {
      notaligned_nonstatic_fields_end = next_nonstatic_valuetype_offset;
    } else {
      notaligned_nonstatic_fields_end = next_nonstatic_padded_offset;
    }
--- 4416,4426 ----
      assert(!is_value_type() && !is_value_capable_class(), "@Contended not supported for value types yet");
      next_nonstatic_padded_offset += ContendedPaddingWidth;
    }

    int notaligned_nonstatic_fields_end;
!   if ((nonstatic_value_type_count - nonstatic_nonflattened_value_types_count) != 0) {
      notaligned_nonstatic_fields_end = next_nonstatic_valuetype_offset;
    } else {
      notaligned_nonstatic_fields_end = next_nonstatic_padded_offset;
    }