src/hotspot/share/classfile/classFileParser.cpp

4011                    super_class_index,
4012                    CHECK_NULL);
4013     // The class name should be legal because it is checked when parsing constant pool.
4014     // However, make sure it is not an array type.
4015     bool is_array = false;
4016     if (cp->tag_at(super_class_index).is_klass()) {
4017       super_klass = InstanceKlass::cast(cp->resolved_klass_at(super_class_index));
4018       if (need_verify)
4019         is_array = super_klass->is_array_klass();
4020     } else if (need_verify) {
4021       is_array = (cp->klass_name_at(super_class_index)->char_at(0) == JVM_SIGNATURE_ARRAY);
4022     }
4023     if (need_verify) {
4024       guarantee_property(!is_array,
4025                         "Bad superclass name in class file %s", CHECK_NULL);
4026     }
4027   }
4028   return super_klass;
4029 }
4030 
4031 #ifndef PRODUCT
4032 static void print_field_layout(const Symbol* name,
4033                                Array<u2>* fields,
4034                                ConstantPool* cp,
4035                                int instance_size,
4036                                int instance_fields_start,
4037                                int instance_fields_end,
4038                                int static_fields_end) {
4039 
4040   assert(name != NULL, "invariant");
4041 
4042   tty->print("%s: field layout\n", name->as_klass_external_name());
4043   tty->print("  @%3d %s\n", instance_fields_start, "--- instance fields start ---");
4044   for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
4045     if (!fs.access_flags().is_static()) {
4046       tty->print("  @%3d \"%s\" %s\n",
4047         fs.offset(),
4048         fs.name()->as_klass_external_name(),
4049         fs.signature()->as_klass_external_name());
4050     }
4051   }
4052   tty->print("  @%3d %s\n", instance_fields_end, "--- instance fields end ---");
4053   tty->print("  @%3d %s\n", instance_size * wordSize, "--- instance ends ---");
4054   tty->print("  @%3d %s\n", InstanceMirrorKlass::offset_of_static_fields(), "--- static fields start ---");
4055   for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
4056     if (fs.access_flags().is_static()) {
4057       tty->print("  @%3d \"%s\" %s\n",
4058         fs.offset(),
4059         fs.name()->as_klass_external_name(),
4060         fs.signature()->as_klass_external_name());
4061     }
4062   }
4063   tty->print("  @%3d %s\n", static_fields_end, "--- static fields end ---");
4064   tty->print("\n");
4065 }
4066 #endif
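
     // Illustrative only, not part of the original source: for a hypothetical class
     // with an int field, a long field and one static int field, the routine above
     // prints output along these lines (all offsets, including the static-field base
     // taken from InstanceMirrorKlass, are made up for the example):
     //
     //   Example: field layout
     //     @ 12 --- instance fields start ---
     //     @ 12 "count" I
     //     @ 16 "ticks" J
     //     @ 24 --- instance fields end ---
     //     @ 24 --- instance ends ---
     //     @112 --- static fields start ---
     //     @112 "hits" I
     //     @116 --- static fields end ---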
4067 
4068 OopMapBlocksBuilder::OopMapBlocksBuilder(unsigned int max_blocks) {
4069   _max_nonstatic_oop_maps = max_blocks;
4070   _nonstatic_oop_map_count = 0;
4071   if (max_blocks == 0) {
4072     _nonstatic_oop_maps = NULL;
4073   } else {
4074     _nonstatic_oop_maps =
4075         NEW_RESOURCE_ARRAY(OopMapBlock, _max_nonstatic_oop_maps);
4076     memset(_nonstatic_oop_maps, 0, sizeof(OopMapBlock) * max_blocks);
4077   }
4078 }
4079 
4080 OopMapBlock* OopMapBlocksBuilder::last_oop_map() const {
4081   assert(_nonstatic_oop_map_count > 0, "Has no oop maps");
4082   return _nonstatic_oop_maps + (_nonstatic_oop_map_count - 1);
4083 }
4084 
4085 // addition of super oop maps
4086 void OopMapBlocksBuilder::initialize_inherited_blocks(OopMapBlock* blocks, unsigned int nof_blocks) {
4087   assert(nof_blocks && _nonstatic_oop_map_count == 0 &&


4164 }
4165 
4166 void OopMapBlocksBuilder::print_on(outputStream* st) const {
4167   st->print_cr("  OopMapBlocks: %3d  /%3d", _nonstatic_oop_map_count, _max_nonstatic_oop_maps);
4168   if (_nonstatic_oop_map_count > 0) {
4169     OopMapBlock* map = _nonstatic_oop_maps;
4170     OopMapBlock* last_map = last_oop_map();
4171     assert(map <= last_map, "Last less than first");
4172     while (map <= last_map) {
4173       st->print_cr("    Offset: %3d  -%3d Count: %3d", map->offset(),
4174                    map->offset() + map->offset_span() - heapOopSize, map->count());
4175       map++;
4176     }
4177   }
4178 }
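
     // Illustrative only, not part of the original source: for a klass whose oop maps
     // are {offset 16, count 2} and {offset 32, count 1} (4-byte oops, 3 blocks
     // reserved), the printout above comes out roughly as
     //
     //   OopMapBlocks:   2  /  3
     //     Offset:  16  - 20 Count:   2
     //     Offset:  32  - 32 Count:   1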
4179 
4180 void OopMapBlocksBuilder::print_value_on(outputStream* st) const {
4181   print_on(st);
4182 }
4183 
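     // Illustrative sketch, not part of the original source: the calls below show how
     // layout_fields() (next) drives the builder, using only operations that appear
     // in this file:
     //
     //   OopMapBlocksBuilder* maps = new OopMapBlocksBuilder(max_oop_map_count);
     //   if (super_oop_map_count > 0) {
     //     maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
     //                                       _super_klass->nonstatic_oop_map_count());
     //   }
     //   maps->add(real_offset, 1);    // once for every non-static oop field laid out
     //   maps->compact();              // trim the pessimistically sized block list
     //   info->oop_map_blocks = maps;  // handed back for InstanceKlass creation
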
4184 // Layout fields and fill in FieldLayoutInfo.  Could use more refactoring!
4185 void ClassFileParser::layout_fields(ConstantPool* cp,
4186                                     const FieldAllocationCount* fac,
4187                                     const ClassAnnotationCollector* parsed_annotations,
4188                                     FieldLayoutInfo* info,
4189                                     TRAPS) {
4190 
4191   assert(cp != NULL, "invariant");
4192 
4193   // Field size and offset computation
4194   int nonstatic_field_size = _super_klass == NULL ? 0 :
4195                                _super_klass->nonstatic_field_size();
4196 
4197   // Count the contended fields by type.
4198   //
4199   // We ignore static fields, because @Contended is not supported for them.
4200   // The layout code below will also ignore the static fields.
4201   int nonstatic_contended_count = 0;
4202   FieldAllocationCount fac_contended;
4203   for (AllFieldStream fs(_fields, cp); !fs.done(); fs.next()) {
4204     FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
4205     if (fs.is_contended()) {
4206       fac_contended.count[atype]++;
4207       if (!fs.access_flags().is_static()) {
4208         nonstatic_contended_count++;
4209       }
4210     }
4211   }
4212 
4213 
4214   // Calculate the starting byte offsets
4215   int next_static_oop_offset    = InstanceMirrorKlass::offset_of_static_fields();
4216   int next_static_double_offset = next_static_oop_offset +
4217                                       ((fac->count[STATIC_OOP]) * heapOopSize);
4218   if (fac->count[STATIC_DOUBLE]) {
4219     next_static_double_offset = align_up(next_static_double_offset, BytesPerLong);
4220   }
4221 
4222   int next_static_word_offset   = next_static_double_offset +
4223                                     ((fac->count[STATIC_DOUBLE]) * BytesPerLong);
4224   int next_static_short_offset  = next_static_word_offset +
4225                                     ((fac->count[STATIC_WORD]) * BytesPerInt);
4226   int next_static_byte_offset   = next_static_short_offset +
4227                                   ((fac->count[STATIC_SHORT]) * BytesPerShort);
4228 
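       // Illustrative example, not in the original source: assuming compressed oops
       // (heapOopSize == 4) and an 8-byte-aligned static-field base S, a class with
       // 1 static oop, 1 static long and 2 static ints gets, from the offsets above:
       //   oop  at S            (next_static_double_offset = S + 4, aligned up to S + 8)
       //   long at S + 8        (next_static_word_offset   = S + 16)
       //   ints at S + 16 and S + 20
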
4229   int nonstatic_fields_start  = instanceOopDesc::base_offset_in_bytes() +
4230                                 nonstatic_field_size * heapOopSize;
4231 
4232   int next_nonstatic_field_offset = nonstatic_fields_start;
4233 
4234   const bool is_contended_class     = parsed_annotations->is_contended();
4235 
4236   // Class is contended, pad before all the fields
4237   if (is_contended_class) {
4238     next_nonstatic_field_offset += ContendedPaddingWidth;
4239   }
4240 
4241   // Compute the non-contended fields count.
4242   // The packing code below relies on these counts to determine if some field
4243   // can be squeezed into the alignment gap. Contended fields are obviously
4244   // exempt from that.
4245   unsigned int nonstatic_double_count = fac->count[NONSTATIC_DOUBLE] - fac_contended.count[NONSTATIC_DOUBLE];
4246   unsigned int nonstatic_word_count   = fac->count[NONSTATIC_WORD]   - fac_contended.count[NONSTATIC_WORD];
4247   unsigned int nonstatic_short_count  = fac->count[NONSTATIC_SHORT]  - fac_contended.count[NONSTATIC_SHORT];
4248   unsigned int nonstatic_byte_count   = fac->count[NONSTATIC_BYTE]   - fac_contended.count[NONSTATIC_BYTE];
4249   unsigned int nonstatic_oop_count    = fac->count[NONSTATIC_OOP]    - fac_contended.count[NONSTATIC_OOP];
4250 
4251   // Total non-static fields count, including every contended field
4252   unsigned int nonstatic_fields_count = fac->count[NONSTATIC_DOUBLE] + fac->count[NONSTATIC_WORD] +
4253                                         fac->count[NONSTATIC_SHORT] + fac->count[NONSTATIC_BYTE] +
4254                                         fac->count[NONSTATIC_OOP];
4255 
4256   const bool super_has_nonstatic_fields =
4257           (_super_klass != NULL && _super_klass->has_nonstatic_fields());
4258   const bool has_nonstatic_fields =
4259     super_has_nonstatic_fields || (nonstatic_fields_count != 0);
4260 
4261 
4262   // Prepare list of oops for oop map generation.
4263   //
4264   // "offset" and "count" lists are describing the set of contiguous oop
4265   // regions. offset[i] is the start of the i-th region, which then has
4266   // count[i] oops following. Before we know how many regions are required,
4267   // we pessimistically allocate the maps to fit all the oops into the
4268   // distinct regions.
4269 
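       // Illustrative example, not in the original source: with 4-byte oops, instance
       // oop fields laid out at offsets 16, 20 and 32 are ultimately described by two
       // blocks, {offset 16, count 2} and {offset 32, count 1}.
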
4270   int super_oop_map_count = (_super_klass == NULL) ? 0 :_super_klass->nonstatic_oop_map_count();
4271   int max_oop_map_count = super_oop_map_count + fac->count[NONSTATIC_OOP];
4272 
4273   OopMapBlocksBuilder* nonstatic_oop_maps = new OopMapBlocksBuilder(max_oop_map_count);
4274   if (super_oop_map_count > 0) {
4275     nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
4276                                                     _super_klass->nonstatic_oop_map_count());
4277   }
4278 
4279   int first_nonstatic_oop_offset = 0; // will be set for first oop field
4280 
4281   bool compact_fields  = true;
4282   bool allocate_oops_first = false;
4283 
4284   int next_nonstatic_oop_offset = 0;
4285   int next_nonstatic_double_offset = 0;
4286 
4287   // Rearrange fields for a given allocation style
4288   if (allocate_oops_first) {
4289     // Fields order: oops, longs/doubles, ints, shorts/chars, bytes, padded fields
4290     next_nonstatic_oop_offset    = next_nonstatic_field_offset;
4291     next_nonstatic_double_offset = next_nonstatic_oop_offset +
4292                                     (nonstatic_oop_count * heapOopSize);
4293   } else {
4294     // Fields order: longs/doubles, ints, shorts/chars, bytes, oops, padded fields
4295     next_nonstatic_double_offset = next_nonstatic_field_offset;
4296   }
4297 
4298   int nonstatic_oop_space_count   = 0;
4299   int nonstatic_word_space_count  = 0;
4300   int nonstatic_short_space_count = 0;
4301   int nonstatic_byte_space_count  = 0;
4302   int nonstatic_oop_space_offset = 0;
4303   int nonstatic_word_space_offset = 0;
4304   int nonstatic_short_space_offset = 0;
4305   int nonstatic_byte_space_offset = 0;
4306 
4307   // Try to squeeze some of the fields into the gaps due to
4308   // long/double alignment.
4309   if (nonstatic_double_count > 0) {
4310     int offset = next_nonstatic_double_offset;
4311     next_nonstatic_double_offset = align_up(offset, BytesPerLong);
4312     if (compact_fields && offset != next_nonstatic_double_offset) {
4313       // Allocate available fields into the gap before double field.
4314       int length = next_nonstatic_double_offset - offset;
4315       assert(length == BytesPerInt, "");
4316       nonstatic_word_space_offset = offset;
4317       if (nonstatic_word_count > 0) {
4318         nonstatic_word_count      -= 1;
4319         nonstatic_word_space_count = 1; // Only one will fit
4320         length -= BytesPerInt;
4321         offset += BytesPerInt;
4322       }
4323       nonstatic_short_space_offset = offset;
4324       while (length >= BytesPerShort && nonstatic_short_count > 0) {
4325         nonstatic_short_count       -= 1;
4326         nonstatic_short_space_count += 1;
4327         length -= BytesPerShort;
4328         offset += BytesPerShort;
4329       }
4330       nonstatic_byte_space_offset = offset;
4331       while (length > 0 && nonstatic_byte_count > 0) {
4332         nonstatic_byte_count       -= 1;
4333         nonstatic_byte_space_count += 1;
4334         length -= 1;
4335       }
4336       // Allocate oop field in the gap if there are no other fields for that.
4337       nonstatic_oop_space_offset = offset;
4338       if (length >= heapOopSize && nonstatic_oop_count > 0 &&
4339           !allocate_oops_first) { // when oop fields not first
4340         nonstatic_oop_count      -= 1;
4341         nonstatic_oop_space_count = 1; // Only one will fit
4342         length -= heapOopSize;
4343         offset += heapOopSize;
4344       }
4345     }
4346   }
4347 
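       // Illustrative example, not in the original source: if the first long/double
       // would land at an offset that is 4 mod 8, the align_up() above opens a 4-byte
       // gap (hence the assert that length == BytesPerInt), which is then filled with
       // one int, or up to two shorts, or up to four bytes (or a short/byte mix), or,
       // when oops are not allocated first and heapOopSize == 4, one compressed oop.
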
4348   int next_nonstatic_word_offset = next_nonstatic_double_offset +
4349                                      (nonstatic_double_count * BytesPerLong);
4350   int next_nonstatic_short_offset = next_nonstatic_word_offset +
4351                                       (nonstatic_word_count * BytesPerInt);
4352   int next_nonstatic_byte_offset = next_nonstatic_short_offset +
4353                                      (nonstatic_short_count * BytesPerShort);
4354   int next_nonstatic_padded_offset = next_nonstatic_byte_offset +
4355                                        nonstatic_byte_count;
4356 
4357   // let oops jump before padding with this allocation style
4358   if (!allocate_oops_first) {
4359     next_nonstatic_oop_offset = next_nonstatic_padded_offset;
4360     if( nonstatic_oop_count > 0 ) {
4361       next_nonstatic_oop_offset = align_up(next_nonstatic_oop_offset, heapOopSize);
4362     }
4363     next_nonstatic_padded_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize);
4364   }
4365 
4366   // Iterate over fields again and compute correct offsets.
4367   // The field allocation type was temporarily stored in the offset slot.
4368   // oop fields are located before non-oop fields (static and non-static).
4369   for (AllFieldStream fs(_fields, cp); !fs.done(); fs.next()) {
4370 
4371     // skip already laid out fields
4372     if (fs.is_offset_set()) continue;
4373 
4374     // contended instance fields are handled below
4375     if (fs.is_contended() && !fs.access_flags().is_static()) continue;
4376 
4377     int real_offset = 0;
4378     const FieldAllocationType atype = (const FieldAllocationType) fs.allocation_type();
4379 
4380     // pack the rest of the fields
4381     switch (atype) {
4382       case STATIC_OOP:
4383         real_offset = next_static_oop_offset;
4384         next_static_oop_offset += heapOopSize;
4385         break;
4386       case STATIC_BYTE:
4387         real_offset = next_static_byte_offset;
4388         next_static_byte_offset += 1;
4389         break;
4390       case STATIC_SHORT:
4391         real_offset = next_static_short_offset;
4392         next_static_short_offset += BytesPerShort;
4393         break;
4394       case STATIC_WORD:
4395         real_offset = next_static_word_offset;
4396         next_static_word_offset += BytesPerInt;
4397         break;
4398       case STATIC_DOUBLE:
4399         real_offset = next_static_double_offset;
4400         next_static_double_offset += BytesPerLong;
4401         break;
4402       case NONSTATIC_OOP:
4403         if( nonstatic_oop_space_count > 0 ) {
4404           real_offset = nonstatic_oop_space_offset;
4405           nonstatic_oop_space_offset += heapOopSize;
4406           nonstatic_oop_space_count  -= 1;
4407         } else {
4408           real_offset = next_nonstatic_oop_offset;
4409           next_nonstatic_oop_offset += heapOopSize;
4410         }
4411         nonstatic_oop_maps->add(real_offset, 1);
4412         break;
4413       case NONSTATIC_BYTE:
4414         if( nonstatic_byte_space_count > 0 ) {
4415           real_offset = nonstatic_byte_space_offset;
4416           nonstatic_byte_space_offset += 1;
4417           nonstatic_byte_space_count  -= 1;
4418         } else {
4419           real_offset = next_nonstatic_byte_offset;
4420           next_nonstatic_byte_offset += 1;
4421         }
4422         break;
4423       case NONSTATIC_SHORT:
4424         if( nonstatic_short_space_count > 0 ) {
4425           real_offset = nonstatic_short_space_offset;
4426           nonstatic_short_space_offset += BytesPerShort;
4427           nonstatic_short_space_count  -= 1;
4428         } else {
4429           real_offset = next_nonstatic_short_offset;
4430           next_nonstatic_short_offset += BytesPerShort;
4431         }
4432         break;
4433       case NONSTATIC_WORD:
4434         if( nonstatic_word_space_count > 0 ) {
4435           real_offset = nonstatic_word_space_offset;
4436           nonstatic_word_space_offset += BytesPerInt;
4437           nonstatic_word_space_count  -= 1;
4438         } else {
4439           real_offset = next_nonstatic_word_offset;
4440           next_nonstatic_word_offset += BytesPerInt;
4441         }
4442         break;
4443       case NONSTATIC_DOUBLE:
4444         real_offset = next_nonstatic_double_offset;
4445         next_nonstatic_double_offset += BytesPerLong;
4446         break;
4447       default:
4448         ShouldNotReachHere();
4449     }
4450     fs.set_offset(real_offset);
4451   }
4452 
4453 
4454   // Handle the contended cases.
4455   //
4456   // Each contended field should not intersect the cache line with another contended field.
4457   // In the absence of alignment information, we end up with pessimistically separating
4458   // the fields with full-width padding.
4459   //
4460   // Additionally, this should not break alignment for the fields, so we round the alignment up
4461   // for each field.
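       // Illustrative example, not in the original source: ContendedPaddingWidth
       // defaults to 128 bytes, so two instance int fields that are both @Contended
       // without a group name (and therefore fall into the default group 0) end up as
       //   [128 bytes pre-padding][int A][128 bytes][int B][128 bytes]
       // i.e. each of them sits in its own padded region.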
4462   if (nonstatic_contended_count > 0) {
4463 
4464     // if there is at least one contended field, we need to have pre-padding for them
4465     next_nonstatic_padded_offset += ContendedPaddingWidth;
4466 
4467     // collect all contended groups
4468     ResourceBitMap bm(cp->size());
4469     for (AllFieldStream fs(_fields, cp); !fs.done(); fs.next()) {
4470       // skip already laid out fields
4471       if (fs.is_offset_set()) continue;
4472 
4473       if (fs.is_contended()) {
4474         bm.set_bit(fs.contended_group());
4475       }
4476     }
4477 
4478     int current_group = -1;
4479     while ((current_group = (int)bm.get_next_one_offset(current_group + 1)) != (int)bm.size()) {
4480 
4481       for (AllFieldStream fs(_fields, cp); !fs.done(); fs.next()) {
4482 
4483         // skip already laid out fields
4484         if (fs.is_offset_set()) continue;
4485 
4486         // skip non-contended fields and fields from different group
4487         if (!fs.is_contended() || (fs.contended_group() != current_group)) continue;
4488 
4489         // handle statics below
4490         if (fs.access_flags().is_static()) continue;
4491 
4492         int real_offset = 0;
4493         FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
4494 
4495         switch (atype) {
4496           case NONSTATIC_BYTE:
4497             next_nonstatic_padded_offset = align_up(next_nonstatic_padded_offset, 1);
4498             real_offset = next_nonstatic_padded_offset;
4499             next_nonstatic_padded_offset += 1;
4500             break;
4501 
4502           case NONSTATIC_SHORT:
4503             next_nonstatic_padded_offset = align_up(next_nonstatic_padded_offset, BytesPerShort);
4504             real_offset = next_nonstatic_padded_offset;
4505             next_nonstatic_padded_offset += BytesPerShort;
4506             break;
4507 
4508           case NONSTATIC_WORD:
4509             next_nonstatic_padded_offset = align_up(next_nonstatic_padded_offset, BytesPerInt);
4510             real_offset = next_nonstatic_padded_offset;
4511             next_nonstatic_padded_offset += BytesPerInt;
4512             break;
4513 
4514           case NONSTATIC_DOUBLE:
4515             next_nonstatic_padded_offset = align_up(next_nonstatic_padded_offset, BytesPerLong);
4516             real_offset = next_nonstatic_padded_offset;
4517             next_nonstatic_padded_offset += BytesPerLong;
4518             break;
4519 
4520           case NONSTATIC_OOP:
4521             next_nonstatic_padded_offset = align_up(next_nonstatic_padded_offset, heapOopSize);
4522             real_offset = next_nonstatic_padded_offset;
4523             next_nonstatic_padded_offset += heapOopSize;
4524             nonstatic_oop_maps->add(real_offset, 1);
4525             break;
4526 
4527           default:
4528             ShouldNotReachHere();
4529         }
4530 
4531         if (fs.contended_group() == 0) {
4532           // Contended group defines the equivalence class over the fields:
4533           // the fields within the same contended group are not inter-padded.
4534           // The only exception is default group, which does not incur the
4535           // equivalence, and so requires intra-padding.
4536           next_nonstatic_padded_offset += ContendedPaddingWidth;
4537         }
4538 
4539         fs.set_offset(real_offset);
4540       } // for
4541 
4542       // Start laying out the next group.
4543       // Note that this will effectively pad the last group in the back;
4544       // this is expected to alleviate memory contention effects for
4545       // subclass fields and/or adjacent objects.
4546       // If this was the default group, the padding is already in place.
4547       if (current_group != 0) {
4548         next_nonstatic_padded_offset += ContendedPaddingWidth;
4549       }
4550     }
4551 
4552     // handle static fields
4553   }
4554 
4555   // Entire class is contended, pad in the back.
4556   // This helps to alleviate memory contention effects for subclass fields
4557   // and/or adjacent objects.
4558   if (is_contended_class) {
4559     next_nonstatic_padded_offset += ContendedPaddingWidth;
4560   }
4561 
4562   int notaligned_nonstatic_fields_end = next_nonstatic_padded_offset;
4563 
4564   int nonstatic_fields_end      = align_up(notaligned_nonstatic_fields_end, heapOopSize);
4565   int instance_end              = align_up(notaligned_nonstatic_fields_end, wordSize);
4566   int static_fields_end         = align_up(next_static_byte_offset, wordSize);
4567 
4568   int static_field_size         = (static_fields_end -
4569                                    InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
4570   nonstatic_field_size          = nonstatic_field_size +
4571                                   (nonstatic_fields_end - nonstatic_fields_start) / heapOopSize;
4572 
4573   int instance_size             = align_object_size(instance_end / wordSize);
4574 
4575   assert(instance_size == align_object_size(align_up(
4576          (instanceOopDesc::base_offset_in_bytes() + nonstatic_field_size*heapOopSize),
4577           wordSize) / wordSize), "consistent layout helper value");
4578 
4579   // Invariant: nonstatic_field end/start should only change if there are
4580   // nonstatic fields in the class, or if the class is contended. We compare
4581   // against the non-aligned value, so that end alignment will not fail the
4582   // assert without actually having the fields.
4583   assert((notaligned_nonstatic_fields_end == nonstatic_fields_start) ||
4584          is_contended_class ||
4585          (nonstatic_fields_count > 0), "double-check nonstatic start/end");
4586 
4587   // Number of non-static oop map blocks allocated at end of klass.
4588   nonstatic_oop_maps->compact();
4589 
4590 #ifndef PRODUCT
4591   if (PrintFieldLayout) {
4592     print_field_layout(_class_name,
4593           _fields,
4594           cp,
4595           instance_size,
4596           nonstatic_fields_start,
4597           nonstatic_fields_end,
4598           static_fields_end);
4599   }
4600 
4601 #endif
4602   // Pass back information needed for InstanceKlass creation
4603   info->oop_map_blocks = nonstatic_oop_maps;
4604   info->_instance_size = instance_size;
4605   info->_static_field_size = static_field_size;
4606   info->_nonstatic_field_size = nonstatic_field_size;
4607   info->_has_nonstatic_fields = has_nonstatic_fields;
4608 }
4609 
4610 void ClassFileParser::set_precomputed_flags(InstanceKlass* ik) {
4611   assert(ik != NULL, "invariant");
4612 
4613   const Klass* const super = ik->super();
4614 
4615   // Check if this klass has an empty finalize method (i.e. one with return bytecode only),
4616   // in which case we don't have to register objects as finalizable
4617   if (!_has_empty_finalizer) {
4618     if (_has_finalizer ||
4619         (super != NULL && super->has_finalizer())) {
4620       ik->set_has_finalizer();
4621     }
4622   }
4623 
4624 #ifdef ASSERT
4625   bool f = false;
4626   const Method* const m = ik->lookup_method(vmSymbols::finalize_method_name(),
4627                                            vmSymbols::void_method_signature());
4628   if (m != NULL && !m->is_empty_method()) {
4629       f = true;


6651   klassVtable::compute_vtable_size_and_num_mirandas(&_vtable_size,
6652                                                     &_num_miranda_methods,
6653                                                     _all_mirandas,
6654                                                     _super_klass,
6655                                                     _methods,
6656                                                     _access_flags,
6657                                                     _major_version,
6658                                                     loader,
6659                                                     _class_name,
6660                                                     _local_interfaces,
6661                                                     CHECK);
6662 
6663   // Size of Java itable (in words)
6664   _itable_size = _access_flags.is_interface() ? 0 :
6665     klassItable::compute_itable_size(_transitive_interfaces);
6666 
6667   assert(_fac != NULL, "invariant");
6668   assert(_parsed_annotations != NULL, "invariant");
6669 
6670   _field_info = new FieldLayoutInfo();
6671   if (UseNewFieldLayout) {
6672     FieldLayoutBuilder lb(class_name(), super_klass(), _cp, _fields,
6673                           _parsed_annotations->is_contended(), _field_info);
6674     lb.build_layout();
6675   } else {
6676     layout_fields(cp, _fac, _parsed_annotations, _field_info, CHECK);
6677   }
6678 
6679   // Compute reference type
6680   _rt = (NULL ==_super_klass) ? REF_NONE : _super_klass->reference_type();
6681 
6682 }
6683 
6684 void ClassFileParser::set_klass(InstanceKlass* klass) {
6685 
6686 #ifdef ASSERT
6687   if (klass != NULL) {
6688     assert(NULL == _klass, "leaking?");
6689   }
6690 #endif
6691 
6692   _klass = klass;
6693 }
6694 
6695 void ClassFileParser::set_klass_to_deallocate(InstanceKlass* klass) {
6696 
6697 #ifdef ASSERT




4011                    super_class_index,
4012                    CHECK_NULL);
4013     // The class name should be legal because it is checked when parsing constant pool.
4014     // However, make sure it is not an array type.
4015     bool is_array = false;
4016     if (cp->tag_at(super_class_index).is_klass()) {
4017       super_klass = InstanceKlass::cast(cp->resolved_klass_at(super_class_index));
4018       if (need_verify)
4019         is_array = super_klass->is_array_klass();
4020     } else if (need_verify) {
4021       is_array = (cp->klass_name_at(super_class_index)->char_at(0) == JVM_SIGNATURE_ARRAY);
4022     }
4023     if (need_verify) {
4024       guarantee_property(!is_array,
4025                         "Bad superclass name in class file %s", CHECK_NULL);
4026     }
4027   }
4028   return super_klass;
4029 }
4030 
4031 OopMapBlocksBuilder::OopMapBlocksBuilder(unsigned int max_blocks) {
4032   _max_nonstatic_oop_maps = max_blocks;
4033   _nonstatic_oop_map_count = 0;
4034   if (max_blocks == 0) {
4035     _nonstatic_oop_maps = NULL;
4036   } else {
4037     _nonstatic_oop_maps =
4038         NEW_RESOURCE_ARRAY(OopMapBlock, _max_nonstatic_oop_maps);
4039     memset(_nonstatic_oop_maps, 0, sizeof(OopMapBlock) * max_blocks);
4040   }
4041 }
4042 
4043 OopMapBlock* OopMapBlocksBuilder::last_oop_map() const {
4044   assert(_nonstatic_oop_map_count > 0, "Has no oop maps");
4045   return _nonstatic_oop_maps + (_nonstatic_oop_map_count - 1);
4046 }
4047 
4048 // addition of super oop maps
4049 void OopMapBlocksBuilder::initialize_inherited_blocks(OopMapBlock* blocks, unsigned int nof_blocks) {
4050   assert(nof_blocks && _nonstatic_oop_map_count == 0 &&


4127 }
4128 
4129 void OopMapBlocksBuilder::print_on(outputStream* st) const {
4130   st->print_cr("  OopMapBlocks: %3d  /%3d", _nonstatic_oop_map_count, _max_nonstatic_oop_maps);
4131   if (_nonstatic_oop_map_count > 0) {
4132     OopMapBlock* map = _nonstatic_oop_maps;
4133     OopMapBlock* last_map = last_oop_map();
4134     assert(map <= last_map, "Last less than first");
4135     while (map <= last_map) {
4136       st->print_cr("    Offset: %3d  -%3d Count: %3d", map->offset(),
4137                    map->offset() + map->offset_span() - heapOopSize, map->count());
4138       map++;
4139     }
4140   }
4141 }
4142 
4143 void OopMapBlocksBuilder::print_value_on(outputStream* st) const {
4144   print_on(st);
4145 }
4146 
4147 void ClassFileParser::set_precomputed_flags(InstanceKlass* ik) {
4148   assert(ik != NULL, "invariant");
4149 
4150   const Klass* const super = ik->super();
4151 
4152   // Check if this klass has an empty finalize method (i.e. one with return bytecode only),
4153   // in which case we don't have to register objects as finalizable
4154   if (!_has_empty_finalizer) {
4155     if (_has_finalizer ||
4156         (super != NULL && super->has_finalizer())) {
4157       ik->set_has_finalizer();
4158     }
4159   }
4160 
4161 #ifdef ASSERT
4162   bool f = false;
4163   const Method* const m = ik->lookup_method(vmSymbols::finalize_method_name(),
4164                                            vmSymbols::void_method_signature());
4165   if (m != NULL && !m->is_empty_method()) {
4166       f = true;


6188   klassVtable::compute_vtable_size_and_num_mirandas(&_vtable_size,
6189                                                     &_num_miranda_methods,
6190                                                     _all_mirandas,
6191                                                     _super_klass,
6192                                                     _methods,
6193                                                     _access_flags,
6194                                                     _major_version,
6195                                                     loader,
6196                                                     _class_name,
6197                                                     _local_interfaces,
6198                                                     CHECK);
6199 
6200   // Size of Java itable (in words)
6201   _itable_size = _access_flags.is_interface() ? 0 :
6202     klassItable::compute_itable_size(_transitive_interfaces);
6203 
6204   assert(_fac != NULL, "invariant");
6205   assert(_parsed_annotations != NULL, "invariant");
6206 
6207   _field_info = new FieldLayoutInfo();

6208   FieldLayoutBuilder lb(class_name(), super_klass(), _cp, _fields,
6209                         _parsed_annotations->is_contended(), _field_info);
6210   lb.build_layout();



6211 
6212   // Compute reference type
6213   _rt = (NULL ==_super_klass) ? REF_NONE : _super_klass->reference_type();
6214 
6215 }
6216 
6217 void ClassFileParser::set_klass(InstanceKlass* klass) {
6218 
6219 #ifdef ASSERT
6220   if (klass != NULL) {
6221     assert(NULL == _klass, "leaking?");
6222   }
6223 #endif
6224 
6225   _klass = klass;
6226 }
6227 
6228 void ClassFileParser::set_klass_to_deallocate(InstanceKlass* klass) {
6229 
6230 #ifdef ASSERT
