
src/hotspot/share/classfile/classLoaderData.cpp



*** 93,103 ****
    _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
    ClassLoaderDataGraph::_head = _the_null_class_loader_data;
    assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
  
!   LogTarget(Debug, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("create ");
      _the_null_class_loader_data->print_value_on(&ls);
--- 93,103 ----
    _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
    ClassLoaderDataGraph::_head = _the_null_class_loader_data;
    assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
  
!   LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("create ");
      _the_null_class_loader_data->print_value_on(&ls);
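The logging hunks in this change all make the same adjustment: the per-ClassLoaderData "create" and "unload" messages drop from debug to trace level, so they now appear under -Xlog:class+loader+data=trace. The guarded-logging idiom itself is unchanged. As a minimal sketch of that idiom (the helper name is hypothetical; the types are the ones this file already uses via its existing includes):

// Hypothetical helper illustrating the LogTarget/LogStream guard used at the
// CLD creation and unload sites; output appears with -Xlog:class+loader+data=trace.
static void log_cld_create(ClassLoaderData* cld) {
  LogTarget(Trace, class, loader, data) lt;
  if (lt.is_enabled()) {          // skip the ResourceMark and stream setup when logging is off
    ResourceMark rm;
    LogStream ls(lt);
    ls.print("create ");
    cld->print_value_on(&ls);     // short description of the CLD
    ls.cr();
  }
}

Demoting these messages keeps class+loader+data=debug for the lower-volume bookkeeping output added elsewhere in this change, such as the "deallocate added for ..." and clean_deallocate_lists messages.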
*** 590,611 ****
  }
  
  void ClassLoaderData::unload() {
    _unloading = true;
  
!   LogTarget(Debug, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("unload");
      print_value_on(&ls);
      ls.cr();
    }
  
    // Some items on the _deallocate_list need to free their C heap structures
    // if they are not already on the _klasses list.
!   unload_deallocate_list();
  
    // Tell serviceability tools these classes are unloading
    // after erroneous classes are released.
    classes_do(InstanceKlass::notify_unload_class);
--- 590,611 ----
  }
  
  void ClassLoaderData::unload() {
    _unloading = true;
  
!   LogTarget(Trace, class, loader, data) lt;
    if (lt.is_enabled()) {
      ResourceMark rm;
      LogStream ls(lt);
      ls.print("unload");
      print_value_on(&ls);
      ls.cr();
    }
  
    // Some items on the _deallocate_list need to free their C heap structures
    // if they are not already on the _klasses list.
!   free_deallocate_list_C_heap_structures();
  
    // Tell serviceability tools these classes are unloading
    // after erroneous classes are released.
    classes_do(InstanceKlass::notify_unload_class);
*** 844,862 ****
      dest = _handles.add(h());
    }
  }
  
  // Add this metadata pointer to be freed when it's safe. This is only during
! // class unloading because Handles might point to this metadata field.
  void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
    // Metadata in shared region isn't deleted.
    if (!m->is_shared()) {
      MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
      if (_deallocate_list == NULL) {
        _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
      }
      _deallocate_list->append_if_missing(m);
    }
  }
  
  // Deallocate free metadata on the free list. How useful the PermGen was!
  void ClassLoaderData::free_deallocate_list() {
--- 844,864 ----
      dest = _handles.add(h());
    }
  }
  
  // Add this metadata pointer to be freed when it's safe. This is only during
! // a safepoint which checks if handles point to this metadata field.
  void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
    // Metadata in shared region isn't deleted.
    if (!m->is_shared()) {
      MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
      if (_deallocate_list == NULL) {
        _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
      }
      _deallocate_list->append_if_missing(m);
+     log_debug(class, loader, data)("deallocate added for %s", m->print_value_string());
+     ClassLoaderDataGraph::set_should_clean_deallocate_lists();
    }
  }
  
  // Deallocate free metadata on the free list. How useful the PermGen was!
  void ClassLoaderData::free_deallocate_list() {
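The two added lines change when metadata actually gets freed: add_to_deallocate_list() no longer just parks the pointer, it also tells the graph that a cleanup pass is wanted, since the metadata may still be referenced from running frames and can only be examined safely at a safepoint. A standalone sketch of that producer side, using hypothetical names and standard C++ containers in place of HotSpot's GrowableArray and MutexLockerEx:

#include <atomic>
#include <mutex>
#include <vector>

// Hypothetical stand-in for ClassLoaderDataGraph::_should_clean_deallocate_lists.
static std::atomic<bool> g_should_clean_deallocate_lists{false};

struct DeallocateList {
  std::mutex         lock;
  std::vector<void*> items;

  // Mirrors the shape of add_to_deallocate_list(): record the pointer (once)
  // and request a cleanup pass instead of freeing immediately.
  void add(void* metadata) {
    std::lock_guard<std::mutex> guard(lock);
    for (void* m : items) {
      if (m == metadata) {
        return;                                   // append_if_missing: skip duplicates
      }
    }
    items.push_back(metadata);
    g_should_clean_deallocate_lists.store(true, std::memory_order_release);
  }
};

The matching consumer is the new safepoint cleanup code in the next hunk.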
*** 886,915 ****
        // Metadata is alive.
        // If scratch_class is on stack then it shouldn't be on this list!
        assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
               "scratch classes on this list should be dead");
        // Also should assert that other metadata on the list was found in handles.
      }
    }
  }
  
  // This is distinct from free_deallocate_list. For class loader data that are
  // unloading, this frees the C heap memory for items on the list, and unlinks
  // scratch or error classes so that unloading events aren't triggered for these
  // classes. The metadata is removed with the unloading metaspace.
  // There isn't C heap memory allocated for methods, so nothing is done for them.
! void ClassLoaderData::unload_deallocate_list() {
    // Don't need lock, at safepoint
    assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
    assert(is_unloading(), "only called for ClassLoaderData that are unloading");
    if (_deallocate_list == NULL) {
      return;
    }
    // Go backwards because this removes entries that are freed.
    for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
      Metadata* m = _deallocate_list->at(i);
-     assert (!m->on_stack(), "wouldn't be unloading if this were so");
      _deallocate_list->remove_at(i);
      if (m->is_constantPool()) {
        ((ConstantPool*)m)->release_C_heap_structures();
      } else if (m->is_klass()) {
        InstanceKlass* ik = (InstanceKlass*)m;
--- 888,952 ----
        // Metadata is alive.
        // If scratch_class is on stack then it shouldn't be on this list!
        assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
               "scratch classes on this list should be dead");
        // Also should assert that other metadata on the list was found in handles.
+       // Some cleaning remains.
+       ClassLoaderDataGraph::set_should_clean_deallocate_lists();
      }
    }
  }
  
+ void ClassLoaderDataGraph::clean_deallocate_lists(bool walk_previous_versions) {
+   uint loaders_processed = 0;
+   for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
+     // is_alive check will be necessary for concurrent class unloading.
+     if (cld->is_alive()) {
+       // clean metaspace
+       if (walk_previous_versions) {
+         cld->classes_do(InstanceKlass::purge_previous_versions);
+       }
+       cld->free_deallocate_list();
+       loaders_processed++;
+     }
+   }
+   log_debug(class, loader, data)("clean_deallocate_lists: loaders processed %u %s",
+                                  loaders_processed, walk_previous_versions ? "walk_previous_versions" : "");
+ }
+ 
+ void ClassLoaderDataGraph::walk_metadata_and_clean_metaspaces() {
+   assert(SafepointSynchronize::is_at_safepoint(), "must only be called at safepoint");
+ 
+   _should_clean_deallocate_lists = false; // assume everything gets cleaned
+ 
+   // Mark metadata seen on the stack so we can delete unreferenced entries.
+   // Walk all metadata, including the expensive code cache walk, only for class redefinition.
+   // The MetadataOnStackMark walk during redefinition saves previous versions if it finds old methods
+   // on the stack or in the code cache, so we only have to repeat the full walk if
+   // they were found at that time.
+   // TODO: have redefinition clean old methods out of the code cache. They still exist in some places.
+   bool walk_all_metadata = InstanceKlass::has_previous_versions_and_reset();
+ 
+   MetadataOnStackMark md_on_stack(walk_all_metadata);
+   clean_deallocate_lists(walk_all_metadata);
+ }
+ 
  // This is distinct from free_deallocate_list. For class loader data that are
  // unloading, this frees the C heap memory for items on the list, and unlinks
  // scratch or error classes so that unloading events aren't triggered for these
  // classes. The metadata is removed with the unloading metaspace.
  // There isn't C heap memory allocated for methods, so nothing is done for them.
! void ClassLoaderData::free_deallocate_list_C_heap_structures() {
    // Don't need lock, at safepoint
    assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
    assert(is_unloading(), "only called for ClassLoaderData that are unloading");
    if (_deallocate_list == NULL) {
      return;
    }
    // Go backwards because this removes entries that are freed.
    for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
      Metadata* m = _deallocate_list->at(i);
      _deallocate_list->remove_at(i);
      if (m->is_constantPool()) {
        ((ConstantPool*)m)->release_C_heap_structures();
      } else if (m->is_klass()) {
        InstanceKlass* ik = (InstanceKlass*)m;
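The new ClassLoaderDataGraph functions are the consumer side of the flag raised in add_to_deallocate_list() and in the "metadata is alive" branch above: walk_metadata_and_clean_metaspaces() optimistically clears _should_clean_deallocate_lists before draining the lists, and free_deallocate_list() raises it again whenever an entry is still referenced, so a later safepoint cleanup will retry. A self-contained sketch of that re-arm protocol, with hypothetical names and plain C++ in place of the HotSpot types:

#include <vector>

struct CleanupState {
  bool              should_clean = true;   // stand-in for _should_clean_deallocate_lists
  std::vector<bool> entry_in_use;          // stand-in for Metadata::on_stack()

  // Mirrors the combined shape of walk_metadata_and_clean_metaspaces()
  // and free_deallocate_list().
  void safepoint_cleanup() {
    should_clean = false;                  // optimistic: assume everything gets cleaned
    for (bool in_use : entry_in_use) {
      if (in_use) {
        should_clean = true;               // some cleaning remains; revisit at a later safepoint
      }
      // Entries that are no longer in use would be freed and removed here.
    }
  }
};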
*** 1021,1030 ****
--- 1058,1069 ----
  ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
  ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
  ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;
  
  bool ClassLoaderDataGraph::_should_purge = false;
+ bool ClassLoaderDataGraph::_should_clean_deallocate_lists = false;
+ bool ClassLoaderDataGraph::_safepoint_cleanup_needed = false;
  bool ClassLoaderDataGraph::_metaspace_oom = false;
  
  // Add a new class loader data node to the list. Assign the newly created
  // ClassLoaderData into the java/lang/ClassLoader object as a hidden field
  ClassLoaderData* ClassLoaderDataGraph::add_to_graph(Handle loader, bool is_anonymous) {
*** 1051,1061 ****
  
    do {
      cld->set_next(next);
      ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
      if (exchanged == next) {
!       LogTarget(Debug, class, loader, data) lt;
        if (lt.is_enabled()) {
          ResourceMark rm;
          LogStream ls(lt);
          ls.print("create ");
          cld->print_value_on(&ls);
--- 1090,1100 ----
  
    do {
      cld->set_next(next);
      ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
      if (exchanged == next) {
!       LogTarget(Trace, class, loader, data) lt;
        if (lt.is_enabled()) {
          ResourceMark rm;
          LogStream ls(lt);
          ls.print("create ");
          cld->print_value_on(&ls);
*** 1332,1370 ****
  }
  #endif // INCLUDE_JFR
  
  // Move class loader data from main list to the unloaded list for unloading
  // and deallocation later.
! bool ClassLoaderDataGraph::do_unloading(bool clean_previous_versions) {
    ClassLoaderData* data = _head;
    ClassLoaderData* prev = NULL;
    bool seen_dead_loader = false;
    uint loaders_processed = 0;
    uint loaders_removed = 0;
  
-   // Mark metadata seen on the stack only so we can delete unneeded entries.
-   // Only walk all metadata, including the expensive code cache walk, for Full GC
-   // and only if class redefinition and if there's previous versions of
-   // Klasses to delete.
-   bool walk_all_metadata = clean_previous_versions &&
-                            JvmtiExport::has_redefined_a_class() &&
-                            InstanceKlass::has_previous_versions_and_reset();
-   MetadataOnStackMark md_on_stack(walk_all_metadata);
- 
    // Save previous _unloading pointer for CMS which may add to unloading list before
    // purging and we don't want to rewalk the previously unloaded class loader data.
    _saved_unloading = _unloading;
  
    data = _head;
    while (data != NULL) {
      if (data->is_alive()) {
-       // clean metaspace
-       if (walk_all_metadata) {
-         data->classes_do(InstanceKlass::purge_previous_versions);
-       }
-       data->free_deallocate_list();
        prev = data;
        data = data->next();
        loaders_processed++;
        continue;
      }
--- 1371,1398 ----
  }
  #endif // INCLUDE_JFR
  
  // Move class loader data from main list to the unloaded list for unloading
  // and deallocation later.
! bool ClassLoaderDataGraph::do_unloading(bool do_cleaning) {
! 
!   // Indicate whether safepoint cleanup is needed.
!   _safepoint_cleanup_needed |= do_cleaning;
    ClassLoaderData* data = _head;
    ClassLoaderData* prev = NULL;
    bool seen_dead_loader = false;
    uint loaders_processed = 0;
    uint loaders_removed = 0;
  
    // Save previous _unloading pointer for CMS which may add to unloading list before
    // purging and we don't want to rewalk the previously unloaded class loader data.
    _saved_unloading = _unloading;
  
    data = _head;
    while (data != NULL) {
      if (data->is_alive()) {
        prev = data;
        data = data->next();
        loaders_processed++;
        continue;
      }
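With the MetadataOnStackMark walk and the per-loader list cleaning moved into walk_metadata_and_clean_metaspaces(), GC unloading only records that cleanup is wanted: the |= keeps the request sticky across collections until a safepoint cleanup consumes it. A sketch of that handshake, with hypothetical names; the consumer side is an assumption, since it lives outside this file:

#include <atomic>

// Stand-in for ClassLoaderDataGraph::_safepoint_cleanup_needed.
static std::atomic<bool> g_safepoint_cleanup_needed{false};

// GC side: only accumulate the request, never clear it here.
static void do_unloading(bool do_cleaning) {
  if (do_cleaning) {
    g_safepoint_cleanup_needed.store(true, std::memory_order_relaxed);
  }
  // ... walk the graph and move dead class loader data to the unloading list ...
}

// One plausible consumer: a safepoint cleanup task that tests and resets the
// flag before doing the deferred metadata walk.
static void safepoint_cleanup_task() {
  if (g_safepoint_cleanup_needed.exchange(false, std::memory_order_relaxed)) {
    // ... e.g. ClassLoaderDataGraph::walk_metadata_and_clean_metaspaces() ...
  }
}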