< prev index next >

src/hotspot/share/memory/metaspace.cpp

Print this page
rev 49210 : imported patch 8185034-metaspace-cleanup-1-rename-metaspaceaux
rev 49211 : 8199431: Split up class Metaspace into a static and a non-static part
Reviewed-by:


1209   };
1210 };
1211 
1212 class Metadebug : AllStatic {
1213   // Debugging support for Metaspaces
     // Countdown until the next injected allocation failure; presumably
     // driven by a fail-a-lot develop flag — confirm against init below.
1214   static int _allocation_fail_alot_count;
1215 
1216  public:
1217 
     // Seeds the fail-a-lot countdown.
1218   static void init_allocation_fail_alot_count();
1219 #ifdef ASSERT
     // Returns true when an artificial metadata allocation failure should
     // be injected at this allocation site (debug builds only).
1220   static bool test_metadata_failure();
1221 #endif
1222 };
1223 
     // Zero until explicitly initialized.
1224 int Metadebug::_allocation_fail_alot_count = 0;
1225 
1226 //  SpaceManager - used by Metaspace to handle allocations
1227 class SpaceManager : public CHeapObj<mtClass> {
1228   friend class Metaspace;

1229   friend class Metadebug;
1230 
1231  private:
1232 
1233   // protects allocations
1234   Mutex* const _lock;
1235 
1236   // Type of metadata allocated.
1237   const Metaspace::MetadataType   _mdtype;
1238 
1239   // Type of metaspace
1240   const Metaspace::MetaspaceType  _space_type;
1241 
1242   // List of chunks in use by this SpaceManager.  Allocations
1243   // are done from the current chunk.  The list is used for deallocating
1244   // chunks when the SpaceManager is freed.
1245   Metachunk* _chunks_in_use[NumberOfInUseLists];
1246   Metachunk* _current_chunk;
1247 
1248   // Maximum number of small chunks to allocate to a SpaceManager


3814          words, mdtype, used_words(mdtype));
3815   // For CMS deallocation of the Metaspaces occurs during the
3816   // sweep which is a concurrent phase.  Protection by the expand_lock()
3817   // is not enough since allocation is on a per Metaspace basis
3818   // and protected by the Metaspace lock.
3819   Atomic::sub(words, &_used_words[mdtype]);
3820 }
3821 
     // Atomically bumps the global used-words counter for the given
     // metadata type (class vs. non-class).
3822 void MetaspaceUtils::inc_used(Metaspace::MetadataType mdtype, size_t words) {
3823   // _used_words tracks allocations for
3824   // each piece of metadata.  Those allocations are
3825   // generally done concurrently by different application
3826   // threads so must be done atomically.
3827   Atomic::add(words, &_used_words[mdtype]);
3828 }
3829 
3830 size_t MetaspaceUtils::used_bytes_slow(Metaspace::MetadataType mdtype) {
3831   size_t used = 0;
3832   ClassLoaderDataGraphMetaspaceIterator iter;
3833   while (iter.repeat()) {
3834     Metaspace* msp = iter.get_next();
3835     // Sum allocated_blocks_words for each metaspace
3836     if (msp != NULL) {
3837       used += msp->used_words_slow(mdtype);
3838     }
3839   }
3840   return used * BytesPerWord;
3841 }
3842 
3843 size_t MetaspaceUtils::free_bytes_slow(Metaspace::MetadataType mdtype) {
3844   size_t free = 0;
3845   ClassLoaderDataGraphMetaspaceIterator iter;
3846   while (iter.repeat()) {
3847     Metaspace* msp = iter.get_next();
3848     if (msp != NULL) {
3849       free += msp->free_words_slow(mdtype);
3850     }
3851   }
3852   return free * BytesPerWord;
3853 }
3854 
     // Sums per-loader metaspace capacity (words in chunks handed out to
     // SpaceManagers) for the given type; returns bytes. Class-type
     // capacity is 0 when no compressed class space exists.
3855 size_t MetaspaceUtils::capacity_bytes_slow(Metaspace::MetadataType mdtype) {
3856   if ((mdtype == Metaspace::ClassType) && !Metaspace::using_class_space()) {
3857     return 0;
3858   }
3859   // Don't count the space in the freelists.  That space will be
3860   // added to the capacity calculation as needed.
3861   size_t capacity = 0;
3862   ClassLoaderDataGraphMetaspaceIterator iter;
3863   while (iter.repeat()) {
3864     Metaspace* msp = iter.get_next();
3865     if (msp != NULL) {
3866       capacity += msp->capacity_words_slow(mdtype);
3867     }
3868   }
3869   return capacity * BytesPerWord;
3870 }
3871 
3872 size_t MetaspaceUtils::capacity_bytes_slow() {
3873 #ifdef PRODUCT
3874   // Use capacity_bytes() in PRODUCT instead of this function.
3875   guarantee(false, "Should not call capacity_bytes_slow() in the PRODUCT");
3876 #endif
3877   size_t class_capacity = capacity_bytes_slow(Metaspace::ClassType);
3878   size_t non_class_capacity = capacity_bytes_slow(Metaspace::NonClassType);
3879   assert(capacity_bytes() == class_capacity + non_class_capacity,
3880          "bad accounting: capacity_bytes() " SIZE_FORMAT
3881          " class_capacity + non_class_capacity " SIZE_FORMAT
3882          " class_capacity " SIZE_FORMAT " non_class_capacity " SIZE_FORMAT,
3883          capacity_bytes(), class_capacity + non_class_capacity,
3884          class_capacity, non_class_capacity);


3977   out->print_cr("  Chunk accounting: (used in chunks " SIZE_FORMAT
3978              "K + unused in chunks " SIZE_FORMAT "K  + "
3979              " capacity in free chunks " SIZE_FORMAT "K) = " SIZE_FORMAT
3980              "K  capacity in allocated chunks " SIZE_FORMAT "K",
3981              used_bytes / K,
3982              free_bytes / K,
3983              free_chunks_capacity_bytes / K,
3984              used_and_free / K,
3985              capacity_bytes / K);
3986   // Accounting can only be correct if we got the values during a safepoint
3987   assert(!SafepointSynchronize::is_at_safepoint() || used_and_free == capacity_bytes, "Accounting is wrong");
3988 }
3989 
3990 // Print total fragmentation for class metaspaces
     // Aggregates per-chunk-size waste and chunk counts over every
     // loader's class-space SpaceManager, then prints one summary line.
     // Caller must ensure a compressed class space exists (asserted).
3991 void MetaspaceUtils::print_class_waste(outputStream* out) {
3992   assert(Metaspace::using_class_space(), "class metaspace not used");
3993   size_t cls_specialized_waste = 0, cls_small_waste = 0, cls_medium_waste = 0;
3994   size_t cls_specialized_count = 0, cls_small_count = 0, cls_medium_count = 0, cls_humongous_count = 0;
3995   ClassLoaderDataGraphMetaspaceIterator iter;
3996   while (iter.repeat()) {
3997     Metaspace* msp = iter.get_next();
3998     if (msp != NULL) {
3999       cls_specialized_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
4000       cls_specialized_count += msp->class_vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
4001       cls_small_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SmallIndex);
4002       cls_small_count += msp->class_vsm()->sum_count_in_chunks_in_use(SmallIndex);
4003       cls_medium_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(MediumIndex);
4004       cls_medium_count += msp->class_vsm()->sum_count_in_chunks_in_use(MediumIndex);
4005       cls_humongous_count += msp->class_vsm()->sum_count_in_chunks_in_use(HumongousIndex);
4006     }
4007   }
     // Humongous chunks report only a count ("large count"), no waste.
4008   out->print_cr(" class: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
4009                 SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
4010                 SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
4011                 "large count " SIZE_FORMAT,
4012                 cls_specialized_count, cls_specialized_waste,
4013                 cls_small_count, cls_small_waste,
4014                 cls_medium_count, cls_medium_waste, cls_humongous_count);
4015 }
4016 
4017 // Print total fragmentation for data and class metaspaces separately
4018 void MetaspaceUtils::print_waste(outputStream* out) {
4019   size_t specialized_waste = 0, small_waste = 0, medium_waste = 0;
4020   size_t specialized_count = 0, small_count = 0, medium_count = 0, humongous_count = 0;
4021 
4022   ClassLoaderDataGraphMetaspaceIterator iter;
4023   while (iter.repeat()) {
4024     Metaspace* msp = iter.get_next();
4025     if (msp != NULL) {
4026       specialized_waste += msp->vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
4027       specialized_count += msp->vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
4028       small_waste += msp->vsm()->sum_waste_in_chunks_in_use(SmallIndex);
4029       small_count += msp->vsm()->sum_count_in_chunks_in_use(SmallIndex);
4030       medium_waste += msp->vsm()->sum_waste_in_chunks_in_use(MediumIndex);
4031       medium_count += msp->vsm()->sum_count_in_chunks_in_use(MediumIndex);
4032       humongous_count += msp->vsm()->sum_count_in_chunks_in_use(HumongousIndex);
4033     }
4034   }
4035   out->print_cr("Total fragmentation waste (words) doesn't count free space");
4036   out->print_cr("  data: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
4037                         SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
4038                         SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
4039                         "large count " SIZE_FORMAT,
4040              specialized_count, specialized_waste, small_count,
4041              small_waste, medium_count, medium_waste, humongous_count);
4042   if (Metaspace::using_class_space()) {
4043     print_class_waste(out);
4044   }


4089   size_t         _total_count;
4090   MetadataStats  _total_metadata;
4091   MetadataStats  _total_class;
4092 
4093   size_t         _total_anon_count;
4094   MetadataStats  _total_anon_metadata;
4095   MetadataStats  _total_anon_class;
4096 
4097 public:
4098   PrintCLDMetaspaceInfoClosure(outputStream* out, size_t scale = K)
4099   : _out(out), _scale(scale), _total_count(0), _total_anon_count(0) { }
4100 
4101   ~PrintCLDMetaspaceInfoClosure() {
4102     print_summary();
4103   }
4104 
4105   void do_cld(ClassLoaderData* cld) {
4106     assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
4107 
4108     if (cld->is_unloading()) return;
4109     Metaspace* msp = cld->metaspace_or_null();
4110     if (msp == NULL) {
4111       return;
4112     }
4113 
4114     bool anonymous = false;
4115     if (cld->is_anonymous()) {
4116       _out->print_cr("ClassLoader: for anonymous class");
4117       anonymous = true;
4118     } else {
4119       ResourceMark rm;
4120       _out->print_cr("ClassLoader: %s", cld->loader_name());
4121     }
4122 
4123     print_metaspace(msp, anonymous);
4124     _out->cr();
4125   }
4126 
4127 private:
4128   void print_metaspace(Metaspace* msp, bool anonymous);
4129   void print_summary() const;
4130 };
4131 
4132 void PrintCLDMetaspaceInfoClosure::print_metaspace(Metaspace* msp, bool anonymous){
4133   assert(msp != NULL, "Sanity");
4134   SpaceManager* vsm = msp->vsm();
4135   const char* unit = scale_unit(_scale);
4136 
4137   size_t capacity = vsm->sum_capacity_in_chunks_in_use() * BytesPerWord;
4138   size_t used = vsm->sum_used_in_chunks_in_use() * BytesPerWord;
4139   size_t free = vsm->sum_free_in_chunks_in_use() * BytesPerWord;
4140   size_t waste = vsm->sum_waste_in_chunks_in_use() * BytesPerWord;
4141 
4142   _total_count ++;
4143   MetadataStats metadata_stats(capacity, used, free, waste);
4144   _total_metadata.add(metadata_stats);
4145 
4146   if (anonymous) {
4147     _total_anon_count ++;
4148     _total_anon_metadata.add(metadata_stats);
4149   }
4150 
4151   _out->print("  Metadata   ");
4152   metadata_stats.print_on(_out, _scale);


4311            " used_bytes_slow(%u)" SIZE_FORMAT,
4312            i, used_bytes(i), i, used_in_use_bytes);
4313   }
4314 #endif
4315 }
4316 
     // Cross-checks the cached capacity and used counters against the
     // slow (graph-walking) recomputation; see verify_capacity()/verify_used().
4317 void MetaspaceUtils::verify_metrics() {
4318   verify_capacity();
4319   verify_used();
4320 }
4321 
4322 
4323 // Metaspace methods
4324 
     // Global chunk-size and alignment parameters; zero until set during
     // metaspace initialization (performed elsewhere in this file).
4325 size_t Metaspace::_first_chunk_word_size = 0;
4326 size_t Metaspace::_first_class_chunk_word_size = 0;
4327 
4328 size_t Metaspace::_commit_alignment = 0;
4329 size_t Metaspace::_reserve_alignment = 0;
4330 
     // Per-loader metaspace; all real setup is delegated to initialize().
4331 Metaspace::Metaspace(Mutex* lock, MetaspaceType type) {
4332   initialize(lock, type);
4333 }
4334 
     // Releases the SpaceManagers created in initialize(); the class-space
     // manager only exists when compressed class pointers are in use.
4335 Metaspace::~Metaspace() {
4336   delete _vsm;
4337   if (using_class_space()) {
4338     delete _class_vsm;
4339   }
4340 }
4341 
     // Process-wide virtual-space lists and chunk freelist managers shared
     // by all per-loader metaspaces; created during global initialization.
4342 VirtualSpaceList* Metaspace::_space_list = NULL;
4343 VirtualSpaceList* Metaspace::_class_space_list = NULL;
4344 
4345 ChunkManager* Metaspace::_chunk_manager_metadata = NULL;
4346 ChunkManager* Metaspace::_chunk_manager_class = NULL;
4347 
     // Initial virtual space is sized as a multiple of the boot loader's
     // first chunk (see global initialization below).
4348 #define VIRTUALSPACEMULTIPLIER 2
4349 
4350 #ifdef _LP64
     // 4G: the narrow-klass encoding range with a shift of zero.
4351 static const uint64_t UnscaledClassSpaceMax = (uint64_t(max_juint) + 1);
4352 
4353 void Metaspace::set_narrow_klass_base_and_shift(address metaspace_base, address cds_base) {
4354   assert(!DumpSharedSpaces, "narrow_klass is set by MetaspaceShared class.");
4355   // Figure out the narrow_klass_base and the narrow_klass_shift.  The
4356   // narrow_klass_base is the lower of the metaspace base and the cds base
4357   // (if cds is enabled).  The narrow_klass_shift depends on the distance


4656   // Arbitrarily set the initial virtual space to a multiple
4657   // of the boot class loader size.
4658   size_t word_size = VIRTUALSPACEMULTIPLIER * _first_chunk_word_size;
4659   word_size = align_up(word_size, Metaspace::reserve_alignment_words());
4660 
4661   // Initialize the list of virtual spaces.
4662   _space_list = new VirtualSpaceList(word_size);
4663   _chunk_manager_metadata = new ChunkManager(false/*metaspace*/);
4664 
4665   if (!_space_list->initialization_succeeded()) {
4666     vm_exit_during_initialization("Unable to setup metadata virtual space list.", NULL);
4667   }
4668 
4669   _tracer = new MetaspaceTracer();
4670 }
4671 
     // Post-VM-init hook; forwards to MetaspaceGC so the GC threshold can
     // be finalized once heap/flag setup is complete.
4672 void Metaspace::post_initialize() {
4673   MetaspaceGC::post_initialize();
4674 }
4675 
     // Eagerly installs an initial chunk (sized by metaspace type) into the
     // SpaceManager for mdtype. A NULL chunk (allocation failed) is
     // tolerated; the manager will allocate lazily later.
4676 void Metaspace::initialize_first_chunk(MetaspaceType type, MetadataType mdtype) {
4677   Metachunk* chunk = get_initialization_chunk(type, mdtype);
4678   if (chunk != NULL) {
4679     // Add to this manager's list of chunks in use and current_chunk().
4680     get_space_manager(mdtype)->add_chunk(chunk, true);
4681   }
4682 }
4683 
     // Obtains the initial chunk for mdtype: first from the global chunk
     // freelist, otherwise by carving a new chunk out of the virtual space
     // list. May return NULL if the virtual space cannot be expanded.
4684 Metachunk* Metaspace::get_initialization_chunk(MetaspaceType type, MetadataType mdtype) {
4685   size_t chunk_word_size = get_space_manager(mdtype)->get_initial_chunk_size(type);
4686 
4687   // Get a chunk from the chunk freelist
4688   Metachunk* chunk = get_chunk_manager(mdtype)->chunk_freelist_allocate(chunk_word_size);
4689 
4690   if (chunk == NULL) {
4691     chunk = get_space_list(mdtype)->get_new_chunk(chunk_word_size,
4692                                                   get_space_manager(mdtype)->medium_chunk_bunch());
4693   }
4694 
4695   return chunk;
4696 }
4697 
     // Debug check that the process-wide space lists and chunk managers
     // exist before any per-loader metaspace is constructed.
4698 void Metaspace::verify_global_initialization() {
4699   assert(space_list() != NULL, "Metadata VirtualSpaceList has not been initialized");
4700   assert(chunk_manager_metadata() != NULL, "Metadata ChunkManager has not been initialized");
4701 
4702   if (using_class_space()) {
4703     assert(class_space_list() != NULL, "Class VirtualSpaceList has not been initialized");
4704     assert(chunk_manager_class() != NULL, "Class ChunkManager has not been initialized");
4705   }
4706 }
4707 
     // Builds this metaspace: one SpaceManager per metadata type (class
     // manager only with compressed class pointers), then pre-allocates
     // first chunks while holding the global expand_lock.
4708 void Metaspace::initialize(Mutex* lock, MetaspaceType type) {
4709   verify_global_initialization();
4710 
4711   // Allocate SpaceManager for metadata objects.
4712   _vsm = new SpaceManager(NonClassType, type, lock);
4713 
4714   if (using_class_space()) {
4715     // Allocate SpaceManager for classes.
4716     _class_vsm = new SpaceManager(ClassType, type, lock);
4717   }
4718 
     // expand_lock guards the shared chunk freelists / virtual space lists
     // while the initial chunks are carved out.
4719   MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
4720 
4721   // Allocate chunk for metadata objects
4722   initialize_first_chunk(type, NonClassType);
4723 
4724   // Allocate chunk for class metadata objects
4725   if (using_class_space()) {
4726     initialize_first_chunk(type, ClassType);
4727   }
4728 }
4729 
     // Rounds a word count up to ReservedSpace's allocation alignment
     // (conversion goes words -> bytes -> aligned bytes -> words).
4730 size_t Metaspace::align_word_size_up(size_t word_size) {
4731   size_t byte_size = word_size * wordSize;
4732   return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
4733 }
4734 
     // Dispatches an allocation to the class-space or non-class-space
     // SpaceManager. Returns NULL on failure (caller handles expansion/GC).
4735 MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
4736   assert(!_frozen, "sanity");
4737   // Don't use class_vsm() unless UseCompressedClassPointers is true.
4738   if (is_class_space_allocation(mdtype)) {
4739     return  class_vsm()->allocate(word_size);
4740   } else {
4741     return  vsm()->allocate(word_size);
4742   }
4743 }
4744 
     // Retry path after a plain allocate() failed: raises the GC high-water
     // mark (capacity_until_GC) by a computed delta and retries the
     // allocation. Returns NULL only if the allocation still fails after
     // this thread's own HWM increment.
4745 MetaWord* Metaspace::expand_and_allocate(size_t word_size, MetadataType mdtype) {
4746   assert(!_frozen, "sanity");
4747   size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
4748   assert(delta_bytes > 0, "Must be");
4749 
4750   size_t before = 0;
4751   size_t after = 0;
4752   MetaWord* res;
4753   bool incremented;
4754 
4755   // Each thread increments the HWM at most once. Even if the thread fails to increment
4756   // the HWM, an allocation is still attempted. This is because another thread must then
4757   // have incremented the HWM and therefore the allocation might still succeed.
4758   do {
4759     incremented = MetaspaceGC::inc_capacity_until_GC(delta_bytes, &after, &before);
4760     res = allocate(word_size, mdtype);
4761   } while (!incremented && res == NULL);
4762 
     // Report the threshold change once, whether or not the allocation
     // that followed it succeeded.
4763   if (incremented) {
4764     tracer()->report_gc_threshold(before, after,
4765                                   MetaspaceGCThresholdUpdater::ExpandAndAllocate);
4766     log_trace(gc, metaspace)("Increase capacity to GC from " SIZE_FORMAT " to " SIZE_FORMAT, before, after);
4767   }
4768 
4769   return res;
4770 }
4771 
     // Used words in this metaspace's chunks for the given type; class
     // type yields 0 without a compressed class space.
4772 size_t Metaspace::used_words_slow(MetadataType mdtype) const {
4773   if (mdtype == ClassType) {
4774     return using_class_space() ? class_vsm()->sum_used_in_chunks_in_use() : 0;
4775   } else {
4776     return vsm()->sum_used_in_chunks_in_use();  // includes overhead!
4777   }
4778 }
4779 
     // Free words remaining in this metaspace's in-use chunks, per type.
4780 size_t Metaspace::free_words_slow(MetadataType mdtype) const {
4781   assert(!_frozen, "sanity");
4782   if (mdtype == ClassType) {
4783     return using_class_space() ? class_vsm()->sum_free_in_chunks_in_use() : 0;
4784   } else {
4785     return vsm()->sum_free_in_chunks_in_use();
4786   }
4787 }
4788 
4789 // Space capacity in the Metaspace.  It includes
4790 // space in the list of chunks from which allocations
4791 // have been made. Don't include space in the global freelist and
4792 // in the space available in the dictionary which
4793 // is already counted in some chunk.
4794 size_t Metaspace::capacity_words_slow(MetadataType mdtype) const {
4795   if (mdtype == ClassType) {
4796     return using_class_space() ? class_vsm()->sum_capacity_in_chunks_in_use() : 0;
4797   } else {
4798     return vsm()->sum_capacity_in_chunks_in_use();
4799   }
4800 }
4801 
     // Byte-unit convenience wrappers over the word-counting "slow" accessors.
4802 size_t Metaspace::used_bytes_slow(MetadataType mdtype) const {
4803   return used_words_slow(mdtype) * BytesPerWord;
4804 }
4805 
4806 size_t Metaspace::capacity_bytes_slow(MetadataType mdtype) const {
4807   return capacity_words_slow(mdtype) * BytesPerWord;
4808 }
4809 
     // Totals across both space managers; the class part contributes 0
     // when no compressed class space is in use.
4810 size_t Metaspace::allocated_blocks_bytes() const {
4811   return vsm()->allocated_blocks_bytes() +
4812       (using_class_space() ? class_vsm()->allocated_blocks_bytes() : 0);
4813 }
4814 
4815 size_t Metaspace::allocated_chunks_bytes() const {
4816   return vsm()->allocated_chunks_bytes() +
4817       (using_class_space() ? class_vsm()->allocated_chunks_bytes() : 0);
4818 }
4819 
     // Returns a block to the owning SpaceManager's deallocation list.
     // At a safepoint only the VM thread may call this (asserted); the
     // per-metaspace lock serializes against concurrent allocations.
4820 void Metaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
4821   assert(!_frozen, "sanity");
4822   assert(!SafepointSynchronize::is_at_safepoint()
4823          || Thread::current()->is_VM_thread(), "should be the VM thread");
4824 
4825   MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
4826 
4827   if (is_class && using_class_space()) {
4828     class_vsm()->deallocate(ptr, word_size);
4829   } else {
4830     vsm()->deallocate(ptr, word_size);
4831   }
4832 }
4833 
4834 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
4835                               MetaspaceObj::Type type, TRAPS) {
4836   assert(!_frozen, "sanity");
4837   if (HAS_PENDING_EXCEPTION) {
4838     assert(false, "Should not allocate with exception pending");
4839     return NULL;  // caller does a CHECK_NULL too
4840   }
4841 
4842   assert(loader_data != NULL, "Should never pass around a NULL loader_data. "
4843         "ClassLoaderData::the_null_class_loader_data() should have been used.");
4844 
4845   MetadataType mdtype = (type == MetaspaceObj::ClassType) ? ClassType : NonClassType;
4846 
4847   // Try to allocate metadata.


4858 
4859     // Allocation failed.
4860     if (is_init_completed()) {
4861       // Only start a GC if the bootstrapping has completed.
4862 
4863       // Try to clean out some memory and retry.
4864       result = Universe::heap()->satisfy_failed_metadata_allocation(loader_data, word_size, mdtype);
4865     }
4866   }
4867 
4868   if (result == NULL) {
4869     report_metadata_oome(loader_data, word_size, type, mdtype, CHECK_NULL);
4870   }
4871 
4872   // Zero initialize.
4873   Copy::fill_to_words((HeapWord*)result, word_size, 0);
4874 
4875   return result;
4876 }
4877 
     // Chunk size (words) the class-space manager would pick for an
     // allocation of word_size; used by OOM reporting below.
4878 size_t Metaspace::class_chunk_size(size_t word_size) {
4879   assert(using_class_space(), "Has to use class space");
4880   return class_vsm()->calc_chunk_size(word_size);
4881 }
4882 
4883 void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetaspaceObj::Type type, MetadataType mdtype, TRAPS) {
4884   tracer()->report_metadata_oom(loader_data, word_size, type, mdtype);
4885 
4886   // If result is still null, we are out of memory.
4887   Log(gc, metaspace, freelist) log;
4888   if (log.is_info()) {
4889     log.info("Metaspace (%s) allocation failed for size " SIZE_FORMAT,
4890              is_class_space_allocation(mdtype) ? "class" : "data", word_size);
4891     ResourceMark rm;
4892     if (log.is_debug()) {
4893       if (loader_data->metaspace_or_null() != NULL) {
4894         LogStream ls(log.debug());
4895         loader_data->print_value_on(&ls);
4896       }
4897     }
4898     LogStream ls(log.info());
4899     MetaspaceUtils::dump(&ls);
4900     MetaspaceUtils::print_metaspace_map(&ls, mdtype);
4901     ChunkManager::print_all_chunkmanagers(&ls);
4902   }
4903 
4904   bool out_of_compressed_class_space = false;
4905   if (is_class_space_allocation(mdtype)) {
4906     Metaspace* metaspace = loader_data->metaspace_non_null();
4907     out_of_compressed_class_space =
4908       MetaspaceUtils::committed_bytes(Metaspace::ClassType) +
4909       (metaspace->class_chunk_size(word_size) * BytesPerWord) >
4910       CompressedClassSpaceSize;
4911   }
4912 
4913   // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
4914   const char* space_string = out_of_compressed_class_space ?
4915     "Compressed class space" : "Metaspace";
4916 
4917   report_java_out_of_memory(space_string);
4918 
4919   if (JvmtiExport::should_post_resource_exhausted()) {
4920     JvmtiExport::post_resource_exhausted(
4921         JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,
4922         space_string);
4923   }
4924 
4925   if (!is_init_completed()) {
4926     vm_exit_during_initialization("OutOfMemoryError", space_string);


4939     case Metaspace::NonClassType: return "Metadata";
4940     default:
4941       assert(false, "Got bad mdtype: %d", (int) mdtype);
4942       return NULL;
4943   }
4944 }
4945 
     // Returns empty virtual space regions of one metadata type to the OS
     // via the space list, updating the matching chunk manager.
4946 void Metaspace::purge(MetadataType mdtype) {
4947   get_space_list(mdtype)->purge(get_chunk_manager(mdtype));
4948 }
4949 
     // Purges both types under the global expand_lock (class space only
     // when present).
4950 void Metaspace::purge() {
4951   MutexLockerEx cl(SpaceManager::expand_lock(),
4952                    Mutex::_no_safepoint_check_flag);
4953   purge(NonClassType);
4954   if (using_class_space()) {
4955     purge(ClassType);
4956   }
4957 }
4958 
     // Prints both space managers, but only in -XX:+Verbose mode; a
     // no-op otherwise.
4959 void Metaspace::print_on(outputStream* out) const {
4960   // Print both class virtual space counts and metaspace.
4961   if (Verbose) {
4962     vsm()->print_on(out);
4963     if (using_class_space()) {
4964       class_vsm()->print_on(out);
4965     }
4966   }
4967 }
4968 
     // True if ptr lies in any metaspace region: checks the CDS/shared
     // metaspace first, then the regular (non-shared) space lists.
4969 bool Metaspace::contains(const void* ptr) {
4970   if (MetaspaceShared::is_in_shared_metaspace(ptr)) {
4971     return true;
4972   }
4973   return contains_non_shared(ptr);
4974 }
4975 
4976 bool Metaspace::contains_non_shared(const void* ptr) {
4977   if (using_class_space() && get_space_list(ClassType)->contains(ptr)) {
4978      return true;
4979   }
4980 
4981   return get_space_list(NonClassType)->contains(ptr);
4982 }
4983 
     // Verifies both space managers (class manager only when present).
4984 void Metaspace::verify() {
4985   vsm()->verify();
4986   if (using_class_space()) {
4987     class_vsm()->verify();
4988   }
4989 }
4990 
     // Dumps both space managers with their addresses for diagnostics.
4991 void Metaspace::dump(outputStream* const out) const {
4992   out->print_cr("\nVirtual space manager: " INTPTR_FORMAT, p2i(vsm()));
4993   vsm()->dump(out);
4994   if (using_class_space()) {
4995     out->print_cr("\nClass space manager: " INTPTR_FORMAT, p2i(class_vsm()));
4996     class_vsm()->dump(out);
4997   }
4998 }
4999 
5000 #ifdef ASSERT
     // Debug-only: verifies a chunk and cross-checks it against the
     // occupancy map of the VirtualSpaceNode that contains it.
5001 static void do_verify_chunk(Metachunk* chunk) {
5002   guarantee(chunk != NULL, "Sanity");
5003   // Verify chunk itself; then verify that it is consistent with the
5004   // occupancy map of its containing node.
5005   chunk->verify();
5006   VirtualSpaceNode* const vsn = chunk->container();
5007   OccupancyMap* const ocmap = vsn->occupancy_map();
5008   ocmap->verify_for_chunk(chunk);
5009 }
5010 #endif
5011 
5012 static void do_update_in_use_info_for_chunk(Metachunk* chunk, bool inuse) {
5013   chunk->set_is_tagged_free(!inuse);
5014   OccupancyMap* const ocmap = chunk->container()->occupancy_map();




1209   };
1210 };
1211 
1212 class Metadebug : AllStatic {
1213   // Debugging support for Metaspaces
     // Countdown until the next injected allocation failure; presumably
     // driven by a fail-a-lot develop flag — confirm against init below.
1214   static int _allocation_fail_alot_count;
1215 
1216  public:
1217 
     // Seeds the fail-a-lot countdown.
1218   static void init_allocation_fail_alot_count();
1219 #ifdef ASSERT
     // Returns true when an artificial metadata allocation failure should
     // be injected at this allocation site (debug builds only).
1220   static bool test_metadata_failure();
1221 #endif
1222 };
1223 
     // Zero until explicitly initialized.
1224 int Metadebug::_allocation_fail_alot_count = 0;
1225 
1226 //  SpaceManager - used by Metaspace to handle allocations
1227 class SpaceManager : public CHeapObj<mtClass> {
1228   friend class Metaspace;
1229   friend class ClassLoaderMetaspace;
1230   friend class Metadebug;
1231 
1232  private:
1233 
1234   // protects allocations
1235   Mutex* const _lock;
1236 
1237   // Type of metadata allocated.
1238   const Metaspace::MetadataType   _mdtype;
1239 
1240   // Type of metaspace
1241   const Metaspace::MetaspaceType  _space_type;
1242 
1243   // List of chunks in use by this SpaceManager.  Allocations
1244   // are done from the current chunk.  The list is used for deallocating
1245   // chunks when the SpaceManager is freed.
1246   Metachunk* _chunks_in_use[NumberOfInUseLists];
1247   Metachunk* _current_chunk;
1248 
1249   // Maximum number of small chunks to allocate to a SpaceManager


3815          words, mdtype, used_words(mdtype));
3816   // For CMS deallocation of the Metaspaces occurs during the
3817   // sweep which is a concurrent phase.  Protection by the expand_lock()
3818   // is not enough since allocation is on a per Metaspace basis
3819   // and protected by the Metaspace lock.
3820   Atomic::sub(words, &_used_words[mdtype]);
3821 }
3822 
     // Atomically bumps the global used-words counter for the given
     // metadata type (class vs. non-class).
3823 void MetaspaceUtils::inc_used(Metaspace::MetadataType mdtype, size_t words) {
3824   // _used_words tracks allocations for
3825   // each piece of metadata.  Those allocations are
3826   // generally done concurrently by different application
3827   // threads so must be done atomically.
3828   Atomic::add(words, &_used_words[mdtype]);
3829 }
3830 
     // Walks every loader's ClassLoaderMetaspace and sums used words of
     // the given type; returns bytes. "Slow": iterates the whole CLD graph.
3831 size_t MetaspaceUtils::used_bytes_slow(Metaspace::MetadataType mdtype) {
3832   size_t used = 0;
3833   ClassLoaderDataGraphMetaspaceIterator iter;
3834   while (iter.repeat()) {
3835     ClassLoaderMetaspace* msp = iter.get_next();
3836     // Sum allocated_blocks_words for each metaspace
3837     if (msp != NULL) {
3838       used += msp->used_words_slow(mdtype);
3839     }
3840   }
3841   return used * BytesPerWord;
3842 }
3843 
     // Sums free words of the given type across every loader's
     // ClassLoaderMetaspace; returns bytes.
3844 size_t MetaspaceUtils::free_bytes_slow(Metaspace::MetadataType mdtype) {
3845   size_t free = 0;
3846   ClassLoaderDataGraphMetaspaceIterator iter;
3847   while (iter.repeat()) {
3848     ClassLoaderMetaspace* msp = iter.get_next();
3849     if (msp != NULL) {
3850       free += msp->free_words_slow(mdtype);
3851     }
3852   }
3853   return free * BytesPerWord;
3854 }
3855 
     // Sums per-loader capacity (words in chunks handed to SpaceManagers)
     // for the given type; returns bytes. Class-type capacity is 0 when
     // no compressed class space exists.
3856 size_t MetaspaceUtils::capacity_bytes_slow(Metaspace::MetadataType mdtype) {
3857   if ((mdtype == Metaspace::ClassType) && !Metaspace::using_class_space()) {
3858     return 0;
3859   }
3860   // Don't count the space in the freelists.  That space will be
3861   // added to the capacity calculation as needed.
3862   size_t capacity = 0;
3863   ClassLoaderDataGraphMetaspaceIterator iter;
3864   while (iter.repeat()) {
3865     ClassLoaderMetaspace* msp = iter.get_next();
3866     if (msp != NULL) {
3867       capacity += msp->capacity_words_slow(mdtype);
3868     }
3869   }
3870   return capacity * BytesPerWord;
3871 }
3872 
3873 size_t MetaspaceUtils::capacity_bytes_slow() {
3874 #ifdef PRODUCT
3875   // Use capacity_bytes() in PRODUCT instead of this function.
3876   guarantee(false, "Should not call capacity_bytes_slow() in the PRODUCT");
3877 #endif
3878   size_t class_capacity = capacity_bytes_slow(Metaspace::ClassType);
3879   size_t non_class_capacity = capacity_bytes_slow(Metaspace::NonClassType);
3880   assert(capacity_bytes() == class_capacity + non_class_capacity,
3881          "bad accounting: capacity_bytes() " SIZE_FORMAT
3882          " class_capacity + non_class_capacity " SIZE_FORMAT
3883          " class_capacity " SIZE_FORMAT " non_class_capacity " SIZE_FORMAT,
3884          capacity_bytes(), class_capacity + non_class_capacity,
3885          class_capacity, non_class_capacity);


3978   out->print_cr("  Chunk accounting: (used in chunks " SIZE_FORMAT
3979              "K + unused in chunks " SIZE_FORMAT "K  + "
3980              " capacity in free chunks " SIZE_FORMAT "K) = " SIZE_FORMAT
3981              "K  capacity in allocated chunks " SIZE_FORMAT "K",
3982              used_bytes / K,
3983              free_bytes / K,
3984              free_chunks_capacity_bytes / K,
3985              used_and_free / K,
3986              capacity_bytes / K);
3987   // Accounting can only be correct if we got the values during a safepoint
3988   assert(!SafepointSynchronize::is_at_safepoint() || used_and_free == capacity_bytes, "Accounting is wrong");
3989 }
3990 
3991 // Print total fragmentation for class metaspaces
     // Aggregates per-chunk-size waste and counts over every loader's
     // class-space SpaceManager, then prints one summary line. Requires a
     // compressed class space (asserted).
3992 void MetaspaceUtils::print_class_waste(outputStream* out) {
3993   assert(Metaspace::using_class_space(), "class metaspace not used");
3994   size_t cls_specialized_waste = 0, cls_small_waste = 0, cls_medium_waste = 0;
3995   size_t cls_specialized_count = 0, cls_small_count = 0, cls_medium_count = 0, cls_humongous_count = 0;
3996   ClassLoaderDataGraphMetaspaceIterator iter;
3997   while (iter.repeat()) {
3998     ClassLoaderMetaspace* msp = iter.get_next();
3999     if (msp != NULL) {
4000       cls_specialized_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
4001       cls_specialized_count += msp->class_vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
4002       cls_small_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SmallIndex);
4003       cls_small_count += msp->class_vsm()->sum_count_in_chunks_in_use(SmallIndex);
4004       cls_medium_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(MediumIndex);
4005       cls_medium_count += msp->class_vsm()->sum_count_in_chunks_in_use(MediumIndex);
4006       cls_humongous_count += msp->class_vsm()->sum_count_in_chunks_in_use(HumongousIndex);
4007     }
4008   }
     // Humongous chunks report only a count ("large count"), no waste.
4009   out->print_cr(" class: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
4010                 SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
4011                 SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
4012                 "large count " SIZE_FORMAT,
4013                 cls_specialized_count, cls_specialized_waste,
4014                 cls_small_count, cls_small_waste,
4015                 cls_medium_count, cls_medium_waste, cls_humongous_count);
4016 }
4017 
4018 // Print total fragmentation for data and class metaspaces separately
4019 void MetaspaceUtils::print_waste(outputStream* out) {
4020   size_t specialized_waste = 0, small_waste = 0, medium_waste = 0;
4021   size_t specialized_count = 0, small_count = 0, medium_count = 0, humongous_count = 0;
4022 
4023   ClassLoaderDataGraphMetaspaceIterator iter;
4024   while (iter.repeat()) {
4025     ClassLoaderMetaspace* msp = iter.get_next();
4026     if (msp != NULL) {
4027       specialized_waste += msp->vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
4028       specialized_count += msp->vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
4029       small_waste += msp->vsm()->sum_waste_in_chunks_in_use(SmallIndex);
4030       small_count += msp->vsm()->sum_count_in_chunks_in_use(SmallIndex);
4031       medium_waste += msp->vsm()->sum_waste_in_chunks_in_use(MediumIndex);
4032       medium_count += msp->vsm()->sum_count_in_chunks_in_use(MediumIndex);
4033       humongous_count += msp->vsm()->sum_count_in_chunks_in_use(HumongousIndex);
4034     }
4035   }
4036   out->print_cr("Total fragmentation waste (words) doesn't count free space");
4037   out->print_cr("  data: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
4038                         SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
4039                         SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
4040                         "large count " SIZE_FORMAT,
4041              specialized_count, specialized_waste, small_count,
4042              small_waste, medium_count, medium_waste, humongous_count);
4043   if (Metaspace::using_class_space()) {
4044     print_class_waste(out);
4045   }


// Running totals accumulated over all visited CLDs (printed by print_summary()
// from the destructor). The "_anon" variants aggregate only CLDs of anonymous
// classes; the others cover every CLD visited.
// NOTE(review): _total_class/_total_anon_class are presumably filled in the part
// of print_metaspace() not visible here — confirm against the full file.
4090   size_t         _total_count;
4091   MetadataStats  _total_metadata;
4092   MetadataStats  _total_class;
4093 
4094   size_t         _total_anon_count;
4095   MetadataStats  _total_anon_metadata;
4096   MetadataStats  _total_anon_class;
4097 
4098 public:
// scale selects the display unit (defaults to K, i.e. KB). Counters start at
// zero; the MetadataStats members rely on their own default construction.
4099   PrintCLDMetaspaceInfoClosure(outputStream* out, size_t scale = K)
4100   : _out(out), _scale(scale), _total_count(0), _total_anon_count(0) { }
4101 
// The summary is emitted from the destructor so it always follows the
// per-CLD output, however the closure is driven.
4102   ~PrintCLDMetaspaceInfoClosure() {
4103     print_summary();
4104   }
4105 
// Visit one ClassLoaderData: print a header naming the loader (or marking it
// as an anonymous-class loader), then its metaspace statistics.
// CLDs that are unloading or have never allocated a metaspace are skipped.
// Must run at a safepoint: the CLD graph is being walked unsynchronized.
4106   void do_cld(ClassLoaderData* cld) {
4107     assert(SafepointSynchronize::is_at_safepoint(), "Must be at a safepoint");
4108 
4109     if (cld->is_unloading()) return;
4110     ClassLoaderMetaspace* msp = cld->metaspace_or_null();
4111     if (msp == NULL) {
4112       return;
4113     }
4114 
4115     bool anonymous = false;
4116     if (cld->is_anonymous()) {
4117       _out->print_cr("ClassLoader: for anonymous class");
4118       anonymous = true;
4119     } else {
// ResourceMark scopes the temporary buffer loader_name() may allocate.
4120       ResourceMark rm;
4121       _out->print_cr("ClassLoader: %s", cld->loader_name());
4122     }
4123 
4124     print_metaspace(msp, anonymous);
4125     _out->cr();
4126   }
4127 
4128 private:
4129   void print_metaspace(ClassLoaderMetaspace* msp, bool anonymous);
4130   void print_summary() const;
4131 };
4132 
4133 void PrintCLDMetaspaceInfoClosure::print_metaspace(ClassLoaderMetaspace* msp, bool anonymous){
4134   assert(msp != NULL, "Sanity");
4135   SpaceManager* vsm = msp->vsm();
4136   const char* unit = scale_unit(_scale);
4137 
4138   size_t capacity = vsm->sum_capacity_in_chunks_in_use() * BytesPerWord;
4139   size_t used = vsm->sum_used_in_chunks_in_use() * BytesPerWord;
4140   size_t free = vsm->sum_free_in_chunks_in_use() * BytesPerWord;
4141   size_t waste = vsm->sum_waste_in_chunks_in_use() * BytesPerWord;
4142 
4143   _total_count ++;
4144   MetadataStats metadata_stats(capacity, used, free, waste);
4145   _total_metadata.add(metadata_stats);
4146 
4147   if (anonymous) {
4148     _total_anon_count ++;
4149     _total_anon_metadata.add(metadata_stats);
4150   }
4151 
4152   _out->print("  Metadata   ");
4153   metadata_stats.print_on(_out, _scale);


4312            " used_bytes_slow(%u)" SIZE_FORMAT,
4313            i, used_bytes(i), i, used_in_use_bytes);
4314   }
4315 #endif
4316 }
4317 
// Cross-check the cached capacity and used counters against freshly computed
// values (see verify_capacity()/verify_used(); the checks themselves are
// compiled in debug builds only — note the #ifdef in verify_used above).
4318 void MetaspaceUtils::verify_metrics() {
4319   verify_capacity();
4320   verify_used();
4321 }
4322 
4323 
4324 // Metaspace methods
4325 
// Initial chunk sizes (in words) handed to the boot loader's non-class and
// class space managers. Zero until Metaspace initialization computes them
// (see the VIRTUALSPACEMULTIPLIER use in the initialization code below).
4326 size_t Metaspace::_first_chunk_word_size = 0;
4327 size_t Metaspace::_first_class_chunk_word_size = 0;
4328 
// Granularities for committing and reserving metaspace memory; also filled
// in during initialization (cf. reserve_alignment_words()).
4329 size_t Metaspace::_commit_alignment = 0;
4330 size_t Metaspace::_reserve_alignment = 0;
4331 
// Per-class-loader metaspace view. All real setup — creating the space
// manager(s) and carving out the first chunks — is delegated to initialize().
4332 ClassLoaderMetaspace::ClassLoaderMetaspace(Mutex* lock, Metaspace::MetaspaceType type) {
4333   initialize(lock, type);
4334 }
4335 
// Release both space managers. _class_vsm only exists when a compressed
// class space is in use (mirrors the allocation in initialize()).
4336 ClassLoaderMetaspace::~ClassLoaderMetaspace() {
4337   delete _vsm;
4338   if (Metaspace::using_class_space()) {
4339     delete _class_vsm;
4340   }
4341 }
4342 
// Global virtual-space lists and chunk managers: one pair for non-class
// metadata, one pair for class metadata (the class pair stays NULL unless a
// compressed class space is in use). Created during global initialization.
4343 VirtualSpaceList* Metaspace::_space_list = NULL;
4344 VirtualSpaceList* Metaspace::_class_space_list = NULL;
4345 
4346 ChunkManager* Metaspace::_chunk_manager_metadata = NULL;
4347 ChunkManager* Metaspace::_chunk_manager_class = NULL;
4348 
// The initial virtual space is sized as this multiple of the boot class
// loader's first chunk size (see the word_size computation in global init).
4349 #define VIRTUALSPACEMULTIPLIER 2
4350 
4351 #ifdef _LP64
// 4G: the largest class space reachable with an unscaled (shift == 0)
// narrow-Klass encoding, i.e. max_juint + 1 bytes. 64-bit only.
4352 static const uint64_t UnscaledClassSpaceMax = (uint64_t(max_juint) + 1);
4353 
4354 void Metaspace::set_narrow_klass_base_and_shift(address metaspace_base, address cds_base) {
4355   assert(!DumpSharedSpaces, "narrow_klass is set by MetaspaceShared class.");
4356   // Figure out the narrow_klass_base and the narrow_klass_shift.  The
4357   // narrow_klass_base is the lower of the metaspace base and the cds base
4358   // (if cds is enabled).  The narrow_klass_shift depends on the distance


4657   // Arbitrarily set the initial virtual space to a multiple
4658   // of the boot class loader size.
4659   size_t word_size = VIRTUALSPACEMULTIPLIER * _first_chunk_word_size;
4660   word_size = align_up(word_size, Metaspace::reserve_alignment_words());
4661 
4662   // Initialize the list of virtual spaces.
4663   _space_list = new VirtualSpaceList(word_size);
4664   _chunk_manager_metadata = new ChunkManager(false/*metaspace*/);
4665 
4666   if (!_space_list->initialization_succeeded()) {
4667     vm_exit_during_initialization("Unable to setup metadata virtual space list.", NULL);
4668   }
4669 
4670   _tracer = new MetaspaceTracer();
4671 }
4672 
// Post-initialization hook: delegates to MetaspaceGC to finish its setup.
4673 void Metaspace::post_initialize() {
4674   MetaspaceGC::post_initialize();
4675 }
4676 
// Obtain and install the initial chunk for the given metadata type.
// A NULL chunk (could not be satisfied) is tolerated silently here: the
// space manager simply starts without a current chunk.
4677 void ClassLoaderMetaspace::initialize_first_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
4678   Metachunk* chunk = get_initialization_chunk(type, mdtype);
4679   if (chunk != NULL) {
4680     // Add to this manager's list of chunks in use and current_chunk().
4681     get_space_manager(mdtype)->add_chunk(chunk, true);
4682   }
4683 }
4684 
// Acquire the initial chunk for this metaspace: the space type determines
// the chunk size; try the global free list first and fall back to carving a
// new chunk out of the virtual space list. May return NULL.
4685 Metachunk* ClassLoaderMetaspace::get_initialization_chunk(Metaspace::MetaspaceType type, Metaspace::MetadataType mdtype) {
4686   size_t chunk_word_size = get_space_manager(mdtype)->get_initial_chunk_size(type);
4687 
4688   // Get a chunk from the chunk freelist
4689   Metachunk* chunk = Metaspace::get_chunk_manager(mdtype)->chunk_freelist_allocate(chunk_word_size);
4690 
// Free list could not satisfy the request: grow the virtual space instead.
4691   if (chunk == NULL) {
4692     chunk = Metaspace::get_space_list(mdtype)->get_new_chunk(chunk_word_size,
4693                                                   get_space_manager(mdtype)->medium_chunk_bunch());
4694   }
4695 
4696   return chunk;
4697 }
4698 
// Sanity-check that the global metaspace structures exist before any
// per-loader metaspace is built on top of them. The class-side structures
// are only required when a compressed class space is in use.
4699 void Metaspace::verify_global_initialization() {
4700   assert(space_list() != NULL, "Metadata VirtualSpaceList has not been initialized");
4701   assert(chunk_manager_metadata() != NULL, "Metadata ChunkManager has not been initialized");
4702 
4703   if (using_class_space()) {
4704     assert(class_space_list() != NULL, "Class VirtualSpaceList has not been initialized");
4705     assert(chunk_manager_class() != NULL, "Class ChunkManager has not been initialized");
4706   }
4707 }
4708 
// Build this loader's metaspace: one SpaceManager for non-class metadata and,
// when a compressed class space is in use, a second one for classes. Both
// managers share the same per-loader lock passed in here (deallocate() relies
// on that). The global expand_lock is held while the first chunks are taken.
4709 void ClassLoaderMetaspace::initialize(Mutex* lock, Metaspace::MetaspaceType type) {
4710   Metaspace::verify_global_initialization();
4711 
4712   // Allocate SpaceManager for metadata objects.
4713   _vsm = new SpaceManager(Metaspace::NonClassType, type, lock);
4714 
4715   if (Metaspace::using_class_space()) {
4716     // Allocate SpaceManager for classes.
4717     _class_vsm = new SpaceManager(Metaspace::ClassType, type, lock);
4718   }
4719 
// Chunk acquisition below touches the global free lists / virtual space
// lists, which are guarded by the expand_lock.
4720   MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);
4721 
4722   // Allocate chunk for metadata objects
4723   initialize_first_chunk(type, Metaspace::NonClassType);
4724 
4725   // Allocate chunk for class metadata objects
4726   if (Metaspace::using_class_space()) {
4727     initialize_first_chunk(type, Metaspace::ClassType);
4728   }
4729 }
4730 
// Round a word count up so that the equivalent byte size is aligned to the
// ReservedSpace allocation granularity; result is again in words.
4731 size_t Metaspace::align_word_size_up(size_t word_size) {
4732   size_t byte_size = word_size * wordSize;
4733   return ReservedSpace::allocation_align_size_up(byte_size) / wordSize;
4734 }
4735 
// Allocate word_size words from the space manager matching mdtype.
// Returns NULL on failure; callers handle expansion/GC retries (see
// expand_and_allocate()). Must not be called once metaspace is frozen.
4736 MetaWord* ClassLoaderMetaspace::allocate(size_t word_size, Metaspace::MetadataType mdtype) {
4737   Metaspace::assert_not_frozen();
4738   // Don't use class_vsm() unless UseCompressedClassPointers is true.
4739   if (Metaspace::is_class_space_allocation(mdtype)) {
4740     return  class_vsm()->allocate(word_size);
4741   } else {
4742     return  vsm()->allocate(word_size);
4743   }
4744 }
4745 
// Allocation slow path: try to raise the GC high-water mark (capacity-until-GC)
// by enough bytes for this request, retrying the allocation as we go.
// Returns NULL if the allocation still cannot be satisfied.
4746 MetaWord* ClassLoaderMetaspace::expand_and_allocate(size_t word_size, Metaspace::MetadataType mdtype) {
4747   Metaspace::assert_not_frozen();
4748   size_t delta_bytes = MetaspaceGC::delta_capacity_until_GC(word_size * BytesPerWord);
4749   assert(delta_bytes > 0, "Must be");
4750 
4751   size_t before = 0;
4752   size_t after = 0;
4753   MetaWord* res;
4754   bool incremented;
4755 
4756   // Each thread increments the HWM at most once. Even if the thread fails to increment
4757   // the HWM, an allocation is still attempted. This is because another thread must then
4758   // have incremented the HWM and therefore the allocation might still succeed.
4759   do {
4760     incremented = MetaspaceGC::inc_capacity_until_GC(delta_bytes, &after, &before);
4761     res = allocate(word_size, mdtype);
4762   } while (!incremented && res == NULL);
4763 
// Report the threshold change only if this thread was the one that raised it.
4764   if (incremented) {
4765     Metaspace::tracer()->report_gc_threshold(before, after,
4766                                   MetaspaceGCThresholdUpdater::ExpandAndAllocate);
4767     log_trace(gc, metaspace)("Increase capacity to GC from " SIZE_FORMAT " to " SIZE_FORMAT, before, after);
4768   }
4769 
4770   return res;
4771 }
4772 
// "Slow" metrics: computed by summing over the in-use chunk lists rather than
// from cached counters. Class-type queries return 0 when no compressed class
// space exists.
4773 size_t ClassLoaderMetaspace::used_words_slow(Metaspace::MetadataType mdtype) const {
4774   if (mdtype == Metaspace::ClassType) {
4775     return Metaspace::using_class_space() ? class_vsm()->sum_used_in_chunks_in_use() : 0;
4776   } else {
4777     return vsm()->sum_used_in_chunks_in_use();  // includes overhead!
4778   }
4779 }
4780 
// Words still free inside this loader's in-use chunks (not the global freelist).
4781 size_t ClassLoaderMetaspace::free_words_slow(Metaspace::MetadataType mdtype) const {
4782   Metaspace::assert_not_frozen();
4783   if (mdtype == Metaspace::ClassType) {
4784     return Metaspace::using_class_space() ? class_vsm()->sum_free_in_chunks_in_use() : 0;
4785   } else {
4786     return vsm()->sum_free_in_chunks_in_use();
4787   }
4788 }
4789 
4790 // Space capacity in the Metaspace.  It includes
4791 // space in the list of chunks from which allocations
4792 // have been made. Don't include space in the global freelist and
4793 // in the space available in the dictionary which
4794 // is already counted in some chunk.
4795 size_t ClassLoaderMetaspace::capacity_words_slow(Metaspace::MetadataType mdtype) const {
4796   if (mdtype == Metaspace::ClassType) {
4797     return Metaspace::using_class_space() ? class_vsm()->sum_capacity_in_chunks_in_use() : 0;
4798   } else {
4799     return vsm()->sum_capacity_in_chunks_in_use();
4800   }
4801 }
4802 
// Byte-sized convenience wrappers over the word-sized "slow" metrics above.
4803 size_t ClassLoaderMetaspace::used_bytes_slow(Metaspace::MetadataType mdtype) const {
4804   return used_words_slow(mdtype) * BytesPerWord;
4805 }
4806 
4807 size_t ClassLoaderMetaspace::capacity_bytes_slow(Metaspace::MetadataType mdtype) const {
4808   return capacity_words_slow(mdtype) * BytesPerWord;
4809 }
4810 
// Totals over both space managers; the class-side contribution is zero when
// no compressed class space exists.
4811 size_t ClassLoaderMetaspace::allocated_blocks_bytes() const {
4812   return vsm()->allocated_blocks_bytes() +
4813       (Metaspace::using_class_space() ? class_vsm()->allocated_blocks_bytes() : 0);
4814 }
4815 
4816 size_t ClassLoaderMetaspace::allocated_chunks_bytes() const {
4817   return vsm()->allocated_chunks_bytes() +
4818       (Metaspace::using_class_space() ? class_vsm()->allocated_chunks_bytes() : 0);
4819 }
4820 
// Return a previously allocated block to the owning space manager.
// Both space managers were created with the same per-loader lock (see
// initialize()), so taking vsm()'s lock covers class deallocations too.
// At a safepoint only the VM thread may deallocate.
4821 void ClassLoaderMetaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
4822   Metaspace::assert_not_frozen();
4823   assert(!SafepointSynchronize::is_at_safepoint()
4824          || Thread::current()->is_VM_thread(), "should be the VM thread");
4825 
4826   MutexLockerEx ml(vsm()->lock(), Mutex::_no_safepoint_check_flag);
4827 
4828   if (is_class && Metaspace::using_class_space()) {
4829     class_vsm()->deallocate(ptr, word_size);
4830   } else {
4831     vsm()->deallocate(ptr, word_size);
4832   }
4833 }
4834 
4835 MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
4836                               MetaspaceObj::Type type, TRAPS) {
4837   assert(!_frozen, "sanity");
4838   if (HAS_PENDING_EXCEPTION) {
4839     assert(false, "Should not allocate with exception pending");
4840     return NULL;  // caller does a CHECK_NULL too
4841   }
4842 
4843   assert(loader_data != NULL, "Should never pass around a NULL loader_data. "
4844         "ClassLoaderData::the_null_class_loader_data() should have been used.");
4845 
4846   MetadataType mdtype = (type == MetaspaceObj::ClassType) ? ClassType : NonClassType;
4847 
4848   // Try to allocate metadata.


4859 
4860     // Allocation failed.
4861     if (is_init_completed()) {
4862       // Only start a GC if the bootstrapping has completed.
4863 
4864       // Try to clean out some memory and retry.
4865       result = Universe::heap()->satisfy_failed_metadata_allocation(loader_data, word_size, mdtype);
4866     }
4867   }
4868 
4869   if (result == NULL) {
4870     report_metadata_oome(loader_data, word_size, type, mdtype, CHECK_NULL);
4871   }
4872 
4873   // Zero initialize.
4874   Copy::fill_to_words((HeapWord*)result, word_size, 0);
4875 
4876   return result;
4877 }
4878 
// Chunk size (in words) the class space manager would pick for a request of
// word_size. Used by OOM reporting to judge compressed-class-space exhaustion.
// Only valid when a compressed class space is in use.
4879 size_t ClassLoaderMetaspace::class_chunk_size(size_t word_size) {
4880   assert(Metaspace::using_class_space(), "Has to use class space");
4881   return class_vsm()->calc_chunk_size(word_size);
4882 }
4883 
4884 void Metaspace::report_metadata_oome(ClassLoaderData* loader_data, size_t word_size, MetaspaceObj::Type type, MetadataType mdtype, TRAPS) {
4885   tracer()->report_metadata_oom(loader_data, word_size, type, mdtype);
4886 
4887   // If result is still null, we are out of memory.
4888   Log(gc, metaspace, freelist) log;
4889   if (log.is_info()) {
4890     log.info("Metaspace (%s) allocation failed for size " SIZE_FORMAT,
4891              is_class_space_allocation(mdtype) ? "class" : "data", word_size);
4892     ResourceMark rm;
4893     if (log.is_debug()) {
4894       if (loader_data->metaspace_or_null() != NULL) {
4895         LogStream ls(log.debug());
4896         loader_data->print_value_on(&ls);
4897       }
4898     }
4899     LogStream ls(log.info());
4900     MetaspaceUtils::dump(&ls);
4901     MetaspaceUtils::print_metaspace_map(&ls, mdtype);
4902     ChunkManager::print_all_chunkmanagers(&ls);
4903   }
4904 
4905   bool out_of_compressed_class_space = false;
4906   if (is_class_space_allocation(mdtype)) {
4907     ClassLoaderMetaspace* metaspace = loader_data->metaspace_non_null();
4908     out_of_compressed_class_space =
4909       MetaspaceUtils::committed_bytes(Metaspace::ClassType) +
4910       (metaspace->class_chunk_size(word_size) * BytesPerWord) >
4911       CompressedClassSpaceSize;
4912   }
4913 
4914   // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
4915   const char* space_string = out_of_compressed_class_space ?
4916     "Compressed class space" : "Metaspace";
4917 
4918   report_java_out_of_memory(space_string);
4919 
4920   if (JvmtiExport::should_post_resource_exhausted()) {
4921     JvmtiExport::post_resource_exhausted(
4922         JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR,
4923         space_string);
4924   }
4925 
4926   if (!is_init_completed()) {
4927     vm_exit_during_initialization("OutOfMemoryError", space_string);


4940     case Metaspace::NonClassType: return "Metadata";
4941     default:
4942       assert(false, "Got bad mdtype: %d", (int) mdtype);
4943       return NULL;
4944   }
4945 }
4946 
// Purge one metadata type: let the virtual space list release what it can,
// coordinating with that type's chunk manager.
4947 void Metaspace::purge(MetadataType mdtype) {
4948   get_space_list(mdtype)->purge(get_chunk_manager(mdtype));
4949 }
4950 
// Purge all metadata types under the global expand_lock, so no concurrent
// expansion can race with the purge.
4951 void Metaspace::purge() {
4952   MutexLockerEx cl(SpaceManager::expand_lock(),
4953                    Mutex::_no_safepoint_check_flag);
4954   purge(NonClassType);
4955   if (using_class_space()) {
4956     purge(ClassType);
4957   }
4958 }
4959 
// Print both space managers' state; emits nothing unless -XX:+Verbose.
4960 void ClassLoaderMetaspace::print_on(outputStream* out) const {
4961   // Print both class virtual space counts and metaspace.
4962   if (Verbose) {
4963     vsm()->print_on(out);
4964     if (Metaspace::using_class_space()) {
4965       class_vsm()->print_on(out);
4966     }
4967   }
4968 }
4969 
// Does ptr point into metaspace? Checks the shared (CDS) metaspace first,
// then the regular (non-shared) spaces.
4970 bool Metaspace::contains(const void* ptr) {
4971   if (MetaspaceShared::is_in_shared_metaspace(ptr)) {
4972     return true;
4973   }
4974   return contains_non_shared(ptr);
4975 }
4976 
// Membership test against the class-space and non-class virtual space lists
// only (excludes the shared/CDS region).
4977 bool Metaspace::contains_non_shared(const void* ptr) {
4978   if (using_class_space() && get_space_list(ClassType)->contains(ptr)) {
4979      return true;
4980   }
4981 
4982   return get_space_list(NonClassType)->contains(ptr);
4983 }
4984 
// Verify both space managers (class side only when a class space exists).
4985 void ClassLoaderMetaspace::verify() {
4986   vsm()->verify();
4987   if (Metaspace::using_class_space()) {
4988     class_vsm()->verify();
4989   }
4990 }
4991 
// Dump detailed state of both space managers, each preceded by its address.
4992 void ClassLoaderMetaspace::dump(outputStream* const out) const {
4993   out->print_cr("\nVirtual space manager: " INTPTR_FORMAT, p2i(vsm()));
4994   vsm()->dump(out);
4995   if (Metaspace::using_class_space()) {
4996     out->print_cr("\nClass space manager: " INTPTR_FORMAT, p2i(class_vsm()));
4997     class_vsm()->dump(out);
4998   }
4999 }
5000 
5001 #ifdef ASSERT
// Debug-only: verify a single chunk and its consistency with the occupancy
// map of the VirtualSpaceNode that contains it.
5002 static void do_verify_chunk(Metachunk* chunk) {
5003   guarantee(chunk != NULL, "Sanity");
5004   // Verify chunk itself; then verify that it is consistent with the
5005   // occupancy map of its containing node.
5006   chunk->verify();
5007   VirtualSpaceNode* const vsn = chunk->container();
5008   OccupancyMap* const ocmap = vsn->occupancy_map();
5009   ocmap->verify_for_chunk(chunk);
5010 }
5011 #endif
5012 
5013 static void do_update_in_use_info_for_chunk(Metachunk* chunk, bool inuse) {
5014   chunk->set_is_tagged_free(!inuse);
5015   OccupancyMap* const ocmap = chunk->container()->occupancy_map();


< prev index next >