src/share/vm/classfile/classFileParser.cpp


@@ -3959,10 +3959,11 @@
   unsigned int nonstatic_byte_count   = fac->count[NONSTATIC_BYTE]   - fac_contended.count[NONSTATIC_BYTE];
   unsigned int nonstatic_oop_count    = fac->count[NONSTATIC_OOP]    - fac_contended.count[NONSTATIC_OOP];
 
   int static_value_type_count = 0;
   int nonstatic_value_type_count = 0;
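+  // Value type fields too large to be flattened are laid out as oop
+  // references; count them separately so the oop block can be sized.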
+  int nonstatic_nonflattened_value_types_count = 0;
   int* nonstatic_value_type_indexes = NULL;
   Klass** nonstatic_value_type_klasses = NULL;
   unsigned int value_type_oop_map_count = 0;
 
   int max_nonstatic_value_type = fac->count[NONSTATIC_VALUETYPE] + 1;

@@ -3983,18 +3984,28 @@
       Klass* klass = SystemDictionary::resolve_or_fail(signature,
                                                        Handle(THREAD, _loader_data->class_loader()),
                                                        _protection_domain, true, CHECK);
       assert(klass != NULL, "Sanity check");
       assert(klass->access_flags().is_value_type(), "Value type expected");
+      ValueKlass* vk = ValueKlass::cast(klass);
+      // Conditions to apply flattening or not should be defined
+      // in a single place
+      if (vk->size_helper() <= ValueArrayElemMaxFlatSize) {
       nonstatic_value_type_indexes[nonstatic_value_type_count] = fs.index();
       nonstatic_value_type_klasses[nonstatic_value_type_count] = klass;
       nonstatic_value_type_count++;
 
       ValueKlass* vklass = ValueKlass::cast(klass);
       if (vklass->contains_oops()) {
         value_type_oop_map_count += vklass->nonstatic_oop_map_count();
       }
+        fs.set_flattening(true);
+      } else {
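+        // Too large to flatten: the field will be laid out as an oop
+        // reference, so reserve room for one oop map entry.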
+        nonstatic_nonflattened_value_types_count++;
+        value_type_oop_map_count++;
+        fs.set_flattening(false);
+      }
     }
   }
 
   // Total non-static fields count, including every contended field
   unsigned int nonstatic_fields_count = fac->count[NONSTATIC_DOUBLE] + fac->count[NONSTATIC_WORD] +

@@ -4073,22 +4084,22 @@
   // Rearrange fields for a given allocation style
   if( allocation_style == 0 ) {
     // Fields order: oops, longs/doubles, ints, shorts/chars, bytes, padded fields
     next_nonstatic_oop_offset    = next_nonstatic_field_offset;
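+    // Non-flattened value type fields occupy oop slots, so include them
+    // when reserving space for the oop block.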
     next_nonstatic_double_offset = next_nonstatic_oop_offset +
-                                    (nonstatic_oop_count * heapOopSize);
+                                    ((nonstatic_oop_count + nonstatic_nonflattened_value_types_count) * heapOopSize);
   } else if( allocation_style == 1 ) {
     // Fields order: longs/doubles, ints, shorts/chars, bytes, oops, padded fields
     next_nonstatic_double_offset = next_nonstatic_field_offset;
   } else if( allocation_style == 2 ) {
     // Fields allocation: oops fields in super and sub classes are together.
     if( nonstatic_field_size > 0 && super_oop_map_count > 0 ) {
       if (next_nonstatic_field_offset == nonstatic_oop_maps->last_oop_map()->end_offset()) {
         allocation_style = 0;   // allocate oops first
         next_nonstatic_oop_offset    = next_nonstatic_field_offset;
         next_nonstatic_double_offset = next_nonstatic_oop_offset +
-                                       (nonstatic_oop_count * heapOopSize);
+                                       ((nonstatic_oop_count + nonstatic_nonflattened_value_types_count) * heapOopSize);
       }
     }
     if( allocation_style == 2 ) {
       allocation_style = 1;     // allocate oops last
       next_nonstatic_double_offset = next_nonstatic_field_offset;

@@ -4157,14 +4168,15 @@
                                        nonstatic_byte_count;
 
   // let oops jump before padding with this allocation style
   if( allocation_style == 1 ) {
     next_nonstatic_oop_offset = next_nonstatic_padded_offset;
-    if( nonstatic_oop_count > 0 ) {
+    if( (nonstatic_oop_count + nonstatic_nonflattened_value_types_count) > 0 ) {
       next_nonstatic_oop_offset = align_up(next_nonstatic_oop_offset, heapOopSize);
     }
-    next_nonstatic_padded_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize);
+    next_nonstatic_padded_offset = next_nonstatic_oop_offset +
+        ((nonstatic_oop_count + nonstatic_nonflattened_value_types_count) * heapOopSize);
   }
 
   // Aligning embedded value types
   // bug below, the current algorithm to layout embedded value types always put them at the
   // end of the layout, which doesn't match the different allocation policies the VM is

@@ -4212,11 +4224,11 @@
       case STATIC_DOUBLE:
         real_offset = next_static_double_offset;
         next_static_double_offset += BytesPerLong;
         break;
       case NONSTATIC_VALUETYPE:
-      {
+        if (fs.is_flatten()) {
         Klass* klass = nonstatic_value_type_klasses[next_value_type_index];
         assert(klass != NULL, "Klass should have been loaded and resolved earlier");
         assert(klass->access_flags().is_value_type(),"Must be a value type");
         ValueKlass* vklass = ValueKlass::cast(klass);
         real_offset = next_nonstatic_valuetype_offset;

@@ -4232,12 +4244,14 @@
           while (map < last_map) {
             nonstatic_oop_maps->add(map->offset() + diff, map->count());
             map++;
           }
         }
-      }
       break;
+        } else {
+          // Fall through: a non-flattened value type field is laid out as a regular oop field
+        }
       case NONSTATIC_OOP:
         if( nonstatic_oop_space_count > 0 ) {
           real_offset = nonstatic_oop_space_offset;
           nonstatic_oop_space_offset += heapOopSize;
           nonstatic_oop_space_count  -= 1;

@@ -4402,11 +4416,11 @@
     assert(!is_value_type() && !is_value_capable_class(), "@Contended not supported for value types yet");
     next_nonstatic_padded_offset += ContendedPaddingWidth;
   }
 
   int notaligned_nonstatic_fields_end;
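+  // Only flattened value type fields extend the layout into the value type
+  // area; non-flattened ones are laid out with the oops.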
-  if (nonstatic_value_type_count != 0) {
+  if ((nonstatic_value_type_count - nonstatic_nonflattened_value_types_count) != 0) {
     notaligned_nonstatic_fields_end = next_nonstatic_valuetype_offset;
   } else {
     notaligned_nonstatic_fields_end = next_nonstatic_padded_offset;
   }
 