
src/share/vm/classfile/classFileParser.cpp

rev 9088 : 8139040: Fix initializations before ShouldNotReachHere() etc. and enable -Wuninitialized on linux.
Reviewed-by: stuefe, coleenp
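
The pattern this change addresses is easy to reproduce in isolation: a local that is assigned on every valid branch of a switch (or if/else chain), with the impossible case funneled into ShouldNotReachHere(), can still look uninitialized to gcc's -Wuninitialized / -Wmaybe-uninitialized analysis when the fatal-error path is not recognized as non-returning. Below is a minimal, self-contained sketch of the idea; should_not_reach_here(), FieldType and field_size() are illustrative stand-ins, not code from classFileParser.cpp. Initializing the local at its declaration, as the patch does throughout layout_fields(), keeps a -Wuninitialized build clean without changing behavior.

    #include <cstdio>
    #include <cstdlib>

    // Stand-in for HotSpot's ShouldNotReachHere(); the real macro reports an
    // internal error and aborts the VM.
    static void should_not_reach_here() {
      std::fprintf(stderr, "ShouldNotReachHere\n");
      std::abort();
    }

    enum FieldType { FT_BYTE, FT_SHORT, FT_WORD };

    // 'size' is assigned on every valid branch, but if the compiler cannot
    // prove the default branch never returns, -Wuninitialized can warn about
    // the 'return size' below. The '= 0' initializer silences the warning and
    // is dead code whenever the switch behaves as intended.
    static int field_size(FieldType t) {
      int size = 0;            // initialization added to placate -Wuninitialized
      switch (t) {
        case FT_BYTE:  size = 1; break;
        case FT_SHORT: size = 2; break;
        case FT_WORD:  size = 4; break;
        default:       should_not_reach_here();
      }
      return size;
    }

    int main() {
      std::printf("%d %d %d\n",
                  field_size(FT_BYTE), field_size(FT_SHORT), field_size(FT_WORD));
      return 0;
    }
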


3184  public:
3185   int*          nonstatic_oop_offsets;
3186   unsigned int* nonstatic_oop_counts;
3187   unsigned int  nonstatic_oop_map_count;
3188   unsigned int  total_oop_map_count;
3189   int           instance_size;
3190   int           nonstatic_field_size;
3191   int           static_field_size;
3192   bool          has_nonstatic_fields;
3193 };
3194 
3195 // Layout fields and fill in FieldLayoutInfo.  Could use more refactoring!
3196 void ClassFileParser::layout_fields(Handle class_loader,
3197                                     FieldAllocationCount* fac,
3198                                     ClassAnnotationCollector* parsed_annotations,
3199                                     FieldLayoutInfo* info,
3200                                     TRAPS) {
3201 
3202   // Field size and offset computation
3203   int nonstatic_field_size = _super_klass() == NULL ? 0 : _super_klass()->nonstatic_field_size();
3204   int next_static_oop_offset;
3205   int next_static_double_offset;
3206   int next_static_word_offset;
3207   int next_static_short_offset;
3208   int next_static_byte_offset;
3209   int next_nonstatic_oop_offset;
3210   int next_nonstatic_double_offset;
3211   int next_nonstatic_word_offset;
3212   int next_nonstatic_short_offset;
3213   int next_nonstatic_byte_offset;
3214   int first_nonstatic_oop_offset;
3215   int next_nonstatic_field_offset;
3216   int next_nonstatic_padded_offset;
3217 
3218   // Count the contended fields by type.
3219   //
3220   // We ignore static fields, because @Contended is not supported for them.
3221   // The layout code below will also ignore the static fields.
3222   int nonstatic_contended_count = 0;
3223   FieldAllocationCount fac_contended;
3224   for (AllFieldStream fs(_fields, _cp); !fs.done(); fs.next()) {
3225     FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
3226     if (fs.is_contended()) {
3227       fac_contended.count[atype]++;
3228       if (!fs.access_flags().is_static()) {
3229         nonstatic_contended_count++;
3230       }
3231     }
3232   }
3233 
3234 
3235   // Calculate the starting byte offsets
3236   next_static_oop_offset      = InstanceMirrorKlass::offset_of_static_fields();


3353       int next_offset = last_map->offset() + (last_map->count() * heapOopSize);
3354       if (next_offset == next_nonstatic_field_offset) {
3355         allocation_style = 0;   // allocate oops first
3356         next_nonstatic_oop_offset    = next_nonstatic_field_offset;
3357         next_nonstatic_double_offset = next_nonstatic_oop_offset +
3358                                        (nonstatic_oop_count * heapOopSize);
3359       }
3360     }
3361     if( allocation_style == 2 ) {
3362       allocation_style = 1;     // allocate oops last
3363       next_nonstatic_double_offset = next_nonstatic_field_offset;
3364     }
3365   } else {
3366     ShouldNotReachHere();
3367   }
3368 
3369   int nonstatic_oop_space_count   = 0;
3370   int nonstatic_word_space_count  = 0;
3371   int nonstatic_short_space_count = 0;
3372   int nonstatic_byte_space_count  = 0;
3373   int nonstatic_oop_space_offset;
3374   int nonstatic_word_space_offset;
3375   int nonstatic_short_space_offset;
3376   int nonstatic_byte_space_offset;
3377 
3378   // Try to squeeze some of the fields into the gaps due to
3379   // long/double alignment.
3380   if( nonstatic_double_count > 0 ) {
3381     int offset = next_nonstatic_double_offset;
3382     next_nonstatic_double_offset = align_size_up(offset, BytesPerLong);
3383     if( compact_fields && offset != next_nonstatic_double_offset ) {
3384       // Allocate available fields into the gap before double field.
3385       int length = next_nonstatic_double_offset - offset;
3386       assert(length == BytesPerInt, "");
3387       nonstatic_word_space_offset = offset;
3388       if( nonstatic_word_count > 0 ) {
3389         nonstatic_word_count      -= 1;
3390         nonstatic_word_space_count = 1; // Only one will fit
3391         length -= BytesPerInt;
3392         offset += BytesPerInt;
3393       }
3394       nonstatic_short_space_offset = offset;
3395       while( length >= BytesPerShort && nonstatic_short_count > 0 ) {
3396         nonstatic_short_count       -= 1;


3428   // let oops jump before padding with this allocation style
3429   if( allocation_style == 1 ) {
3430     next_nonstatic_oop_offset = next_nonstatic_padded_offset;
3431     if( nonstatic_oop_count > 0 ) {
3432       next_nonstatic_oop_offset = align_size_up(next_nonstatic_oop_offset, heapOopSize);
3433     }
3434     next_nonstatic_padded_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize);
3435   }
3436 
3437   // Iterate over fields again and compute correct offsets.
3438   // The field allocation type was temporarily stored in the offset slot.
3439   // oop fields are located before non-oop fields (static and non-static).
3440   for (AllFieldStream fs(_fields, _cp); !fs.done(); fs.next()) {
3441 
3442     // skip already laid out fields
3443     if (fs.is_offset_set()) continue;
3444 
3445     // contended instance fields are handled below
3446     if (fs.is_contended() && !fs.access_flags().is_static()) continue;
3447 
3448     int real_offset;
3449     FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
3450 
3451     // pack the rest of the fields
3452     switch (atype) {
3453       case STATIC_OOP:
3454         real_offset = next_static_oop_offset;
3455         next_static_oop_offset += heapOopSize;
3456         break;
3457       case STATIC_BYTE:
3458         real_offset = next_static_byte_offset;
3459         next_static_byte_offset += 1;
3460         break;
3461       case STATIC_SHORT:
3462         real_offset = next_static_short_offset;
3463         next_static_short_offset += BytesPerShort;
3464         break;
3465       case STATIC_WORD:
3466         real_offset = next_static_word_offset;
3467         next_static_word_offset += BytesPerInt;
3468         break;


3562 
3563       if (fs.is_contended()) {
3564         bm.set_bit(fs.contended_group());
3565       }
3566     }
3567 
3568     int current_group = -1;
3569     while ((current_group = (int)bm.get_next_one_offset(current_group + 1)) != (int)bm.size()) {
3570 
3571       for (AllFieldStream fs(_fields, _cp); !fs.done(); fs.next()) {
3572 
3573         // skip already laid out fields
3574         if (fs.is_offset_set()) continue;
3575 
3576         // skip non-contended fields and fields from different group
3577         if (!fs.is_contended() || (fs.contended_group() != current_group)) continue;
3578 
3579         // handle statics below
3580         if (fs.access_flags().is_static()) continue;
3581 
3582         int real_offset;
3583         FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
3584 
3585         switch (atype) {
3586           case NONSTATIC_BYTE:
3587             next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, 1);
3588             real_offset = next_nonstatic_padded_offset;
3589             next_nonstatic_padded_offset += 1;
3590             break;
3591 
3592           case NONSTATIC_SHORT:
3593             next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, BytesPerShort);
3594             real_offset = next_nonstatic_padded_offset;
3595             next_nonstatic_padded_offset += BytesPerShort;
3596             break;
3597 
3598           case NONSTATIC_WORD:
3599             next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, BytesPerInt);
3600             real_offset = next_nonstatic_padded_offset;
3601             next_nonstatic_padded_offset += BytesPerInt;
3602             break;




3184  public:
3185   int*          nonstatic_oop_offsets;
3186   unsigned int* nonstatic_oop_counts;
3187   unsigned int  nonstatic_oop_map_count;
3188   unsigned int  total_oop_map_count;
3189   int           instance_size;
3190   int           nonstatic_field_size;
3191   int           static_field_size;
3192   bool          has_nonstatic_fields;
3193 };
3194 
3195 // Layout fields and fill in FieldLayoutInfo.  Could use more refactoring!
3196 void ClassFileParser::layout_fields(Handle class_loader,
3197                                     FieldAllocationCount* fac,
3198                                     ClassAnnotationCollector* parsed_annotations,
3199                                     FieldLayoutInfo* info,
3200                                     TRAPS) {
3201 
3202   // Field size and offset computation
3203   int nonstatic_field_size = _super_klass() == NULL ? 0 : _super_klass()->nonstatic_field_size();
3204   int next_static_oop_offset = 0;
3205   int next_static_double_offset = 0;
3206   int next_static_word_offset = 0;
3207   int next_static_short_offset = 0;
3208   int next_static_byte_offset = 0;
3209   int next_nonstatic_oop_offset = 0;
3210   int next_nonstatic_double_offset = 0;
3211   int next_nonstatic_word_offset = 0;
3212   int next_nonstatic_short_offset = 0;
3213   int next_nonstatic_byte_offset = 0;
3214   int first_nonstatic_oop_offset = 0;
3215   int next_nonstatic_field_offset = 0;
3216   int next_nonstatic_padded_offset = 0;
3217 
3218   // Count the contended fields by type.
3219   //
3220   // We ignore static fields, because @Contended is not supported for them.
3221   // The layout code below will also ignore the static fields.
3222   int nonstatic_contended_count = 0;
3223   FieldAllocationCount fac_contended;
3224   for (AllFieldStream fs(_fields, _cp); !fs.done(); fs.next()) {
3225     FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
3226     if (fs.is_contended()) {
3227       fac_contended.count[atype]++;
3228       if (!fs.access_flags().is_static()) {
3229         nonstatic_contended_count++;
3230       }
3231     }
3232   }
3233 
3234 
3235   // Calculate the starting byte offsets
3236   next_static_oop_offset      = InstanceMirrorKlass::offset_of_static_fields();


3353       int next_offset = last_map->offset() + (last_map->count() * heapOopSize);
3354       if (next_offset == next_nonstatic_field_offset) {
3355         allocation_style = 0;   // allocate oops first
3356         next_nonstatic_oop_offset    = next_nonstatic_field_offset;
3357         next_nonstatic_double_offset = next_nonstatic_oop_offset +
3358                                        (nonstatic_oop_count * heapOopSize);
3359       }
3360     }
3361     if( allocation_style == 2 ) {
3362       allocation_style = 1;     // allocate oops last
3363       next_nonstatic_double_offset = next_nonstatic_field_offset;
3364     }
3365   } else {
3366     ShouldNotReachHere();
3367   }
3368 
3369   int nonstatic_oop_space_count    = 0;
3370   int nonstatic_word_space_count   = 0;
3371   int nonstatic_short_space_count  = 0;
3372   int nonstatic_byte_space_count   = 0;
3373   int nonstatic_oop_space_offset   = 0;
3374   int nonstatic_word_space_offset  = 0;
3375   int nonstatic_short_space_offset = 0;
3376   int nonstatic_byte_space_offset  = 0;
3377 
3378   // Try to squeeze some of the fields into the gaps due to
3379   // long/double alignment.
3380   if( nonstatic_double_count > 0 ) {
3381     int offset = next_nonstatic_double_offset;
3382     next_nonstatic_double_offset = align_size_up(offset, BytesPerLong);
3383     if( compact_fields && offset != next_nonstatic_double_offset ) {
3384       // Allocate available fields into the gap before double field.
3385       int length = next_nonstatic_double_offset - offset;
3386       assert(length == BytesPerInt, "");
3387       nonstatic_word_space_offset = offset;
3388       if( nonstatic_word_count > 0 ) {
3389         nonstatic_word_count      -= 1;
3390         nonstatic_word_space_count = 1; // Only one will fit
3391         length -= BytesPerInt;
3392         offset += BytesPerInt;
3393       }
3394       nonstatic_short_space_offset = offset;
3395       while( length >= BytesPerShort && nonstatic_short_count > 0 ) {
3396         nonstatic_short_count       -= 1;


3428   // let oops jump before padding with this allocation style
3429   if( allocation_style == 1 ) {
3430     next_nonstatic_oop_offset = next_nonstatic_padded_offset;
3431     if( nonstatic_oop_count > 0 ) {
3432       next_nonstatic_oop_offset = align_size_up(next_nonstatic_oop_offset, heapOopSize);
3433     }
3434     next_nonstatic_padded_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize);
3435   }
3436 
3437   // Iterate over fields again and compute correct offsets.
3438   // The field allocation type was temporarily stored in the offset slot.
3439   // oop fields are located before non-oop fields (static and non-static).
3440   for (AllFieldStream fs(_fields, _cp); !fs.done(); fs.next()) {
3441 
3442     // skip already laid out fields
3443     if (fs.is_offset_set()) continue;
3444 
3445     // contended instance fields are handled below
3446     if (fs.is_contended() && !fs.access_flags().is_static()) continue;
3447 
3448     int real_offset = 0;
3449     FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
3450 
3451     // pack the rest of the fields
3452     switch (atype) {
3453       case STATIC_OOP:
3454         real_offset = next_static_oop_offset;
3455         next_static_oop_offset += heapOopSize;
3456         break;
3457       case STATIC_BYTE:
3458         real_offset = next_static_byte_offset;
3459         next_static_byte_offset += 1;
3460         break;
3461       case STATIC_SHORT:
3462         real_offset = next_static_short_offset;
3463         next_static_short_offset += BytesPerShort;
3464         break;
3465       case STATIC_WORD:
3466         real_offset = next_static_word_offset;
3467         next_static_word_offset += BytesPerInt;
3468         break;


3562 
3563       if (fs.is_contended()) {
3564         bm.set_bit(fs.contended_group());
3565       }
3566     }
3567 
3568     int current_group = -1;
3569     while ((current_group = (int)bm.get_next_one_offset(current_group + 1)) != (int)bm.size()) {
3570 
3571       for (AllFieldStream fs(_fields, _cp); !fs.done(); fs.next()) {
3572 
3573         // skip already laid out fields
3574         if (fs.is_offset_set()) continue;
3575 
3576         // skip non-contended fields and fields from different group
3577         if (!fs.is_contended() || (fs.contended_group() != current_group)) continue;
3578 
3579         // handle statics below
3580         if (fs.access_flags().is_static()) continue;
3581 
3582         int real_offset = 0;
3583         FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
3584 
3585         switch (atype) {
3586           case NONSTATIC_BYTE:
3587             next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, 1);
3588             real_offset = next_nonstatic_padded_offset;
3589             next_nonstatic_padded_offset += 1;
3590             break;
3591 
3592           case NONSTATIC_SHORT:
3593             next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, BytesPerShort);
3594             real_offset = next_nonstatic_padded_offset;
3595             next_nonstatic_padded_offset += BytesPerShort;
3596             break;
3597 
3598           case NONSTATIC_WORD:
3599             next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, BytesPerInt);
3600             real_offset = next_nonstatic_padded_offset;
3601             next_nonstatic_padded_offset += BytesPerInt;
3602             break;

