467 if (lt.is_enabled()) {
468 ResourceMark rm;
469 LogStream ls(lt);
470 ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), k->external_name());
471 print_value_on(&ls);
472 ls.cr();
473 }
474 }
475 }
476
477 void ClassLoaderData::initialize_holder(Handle loader_or_mirror) {
478 if (loader_or_mirror() != NULL) {
479 assert(_holder.is_null(), "never replace holders");
480 _holder = WeakHandle<vm_class_loader_data>::create(loader_or_mirror);
481 }
482 }
483
484 // Remove a klass from the _klasses list for scratch_class during redefinition
485 // or parsed class in the case of an error.
486 void ClassLoaderData::remove_class(Klass* scratch_class) {
487 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
488
489 // Adjust global class iterator.
490 ClassLoaderDataGraph::adjust_saved_class(scratch_class);
491
492 Klass* prev = NULL;
493 for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
494 if (k == scratch_class) {
495 if (prev == NULL) {
496 _klasses = k->next_link();
497 } else {
498 Klass* next = k->next_link();
499 prev->set_next_link(next);
500 }
501
502 if (k->is_array_klass()) {
503 ClassLoaderDataGraph::dec_array_classes(1);
504 } else {
505 ClassLoaderDataGraph::dec_instance_classes(1);
506 }
507
787 }
788 }
789
790 // Add this metadata pointer to be freed when it's safe. This is only during
791 // a safepoint which checks if handles point to this metadata field.
792 void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
793 // Metadata in shared region isn't deleted.
794 if (!m->is_shared()) {
795 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
796 if (_deallocate_list == NULL) {
797 _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
798 }
799 _deallocate_list->append_if_missing(m);
800 log_debug(class, loader, data)("deallocate added for %s", m->print_value_string());
801 ClassLoaderDataGraph::set_should_clean_deallocate_lists();
802 }
803 }
804
// Deallocate free metadata on the free list. How useful the PermGen was!
// Frees entries whose metadata is no longer referenced from any thread stack;
// entries still on stack are left on the list for a later cleaning pass.
void ClassLoaderData::free_deallocate_list() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
  if (_deallocate_list == NULL) {
    return;  // Nothing was ever queued for deallocation.
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      // Not referenced from any stack: safe to remove and free now.
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        ShouldNotReachHere();  // No other metadata type is ever queued here.
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
      // Some cleaning remains.
      ClassLoaderDataGraph::set_should_clean_deallocate_lists();
    }
  }
}
840
// This is distinct from free_deallocate_list. For class loader data that are
// unloading, this frees the C heap memory for items on the list, and unlinks
// scratch or error classes so that unloading events aren't triggered for these
// classes. The metadata is removed with the unloading metaspace.
// There isn't C heap memory allocated for methods, so nothing is done for them.
void ClassLoaderData::free_deallocate_list_C_heap_structures() {
  // Don't need lock, at safepoint
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(is_unloading(), "only called for ClassLoaderData that are unloading");
  if (_deallocate_list == NULL) {
    return;  // Nothing was ever queued for deallocation.
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    _deallocate_list->remove_at(i);
    if (m->is_constantPool()) {
      ((ConstantPool*)m)->release_C_heap_structures();
    } else if (m->is_klass()) {
      InstanceKlass* ik = (InstanceKlass*)m;
      // also releases ik->constants() C heap memory
      InstanceKlass::release_C_heap_structures(ik);
      // Remove the class so unloading events aren't triggered for
      // this class (scratch or error class) in do_unloading().
      remove_class(ik);
    }
    // Methods fall through: no C heap structures to release (see header comment).
  }
}
|
467 if (lt.is_enabled()) {
468 ResourceMark rm;
469 LogStream ls(lt);
470 ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), k->external_name());
471 print_value_on(&ls);
472 ls.cr();
473 }
474 }
475 }
476
477 void ClassLoaderData::initialize_holder(Handle loader_or_mirror) {
478 if (loader_or_mirror() != NULL) {
479 assert(_holder.is_null(), "never replace holders");
480 _holder = WeakHandle<vm_class_loader_data>::create(loader_or_mirror);
481 }
482 }
483
484 // Remove a klass from the _klasses list for scratch_class during redefinition
485 // or parsed class in the case of an error.
486 void ClassLoaderData::remove_class(Klass* scratch_class) {
487 assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
488
489 // Adjust global class iterator.
490 ClassLoaderDataGraph::adjust_saved_class(scratch_class);
491
492 Klass* prev = NULL;
493 for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
494 if (k == scratch_class) {
495 if (prev == NULL) {
496 _klasses = k->next_link();
497 } else {
498 Klass* next = k->next_link();
499 prev->set_next_link(next);
500 }
501
502 if (k->is_array_klass()) {
503 ClassLoaderDataGraph::dec_array_classes(1);
504 } else {
505 ClassLoaderDataGraph::dec_instance_classes(1);
506 }
507
787 }
788 }
789
790 // Add this metadata pointer to be freed when it's safe. This is only during
791 // a safepoint which checks if handles point to this metadata field.
792 void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
793 // Metadata in shared region isn't deleted.
794 if (!m->is_shared()) {
795 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
796 if (_deallocate_list == NULL) {
797 _deallocate_list = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(100, true);
798 }
799 _deallocate_list->append_if_missing(m);
800 log_debug(class, loader, data)("deallocate added for %s", m->print_value_string());
801 ClassLoaderDataGraph::set_should_clean_deallocate_lists();
802 }
803 }
804
// Deallocate free metadata on the free list. How useful the PermGen was!
// Frees entries whose metadata is no longer referenced from any thread stack;
// entries still on stack are left on the list for a later cleaning pass.
void ClassLoaderData::free_deallocate_list() {
  // This must be called at a safepoint because it depends on metadata walking at
  // safepoint cleanup time.
  assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
  assert(!is_unloading(), "only called for ClassLoaderData that are not unloading");
  if (_deallocate_list == NULL) {
    return;  // Nothing was ever queued for deallocation.
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    if (!m->on_stack()) {
      // Not referenced from any stack: safe to remove and free now.
      _deallocate_list->remove_at(i);
      // There are only three types of metadata that we deallocate directly.
      // Cast them so they can be used by the template function.
      if (m->is_method()) {
        MetadataFactory::free_metadata(this, (Method*)m);
      } else if (m->is_constantPool()) {
        MetadataFactory::free_metadata(this, (ConstantPool*)m);
      } else if (m->is_klass()) {
        MetadataFactory::free_metadata(this, (InstanceKlass*)m);
      } else {
        ShouldNotReachHere();  // No other metadata type is ever queued here.
      }
    } else {
      // Metadata is alive.
      // If scratch_class is on stack then it shouldn't be on this list!
      assert(!m->is_klass() || !((InstanceKlass*)m)->is_scratch_class(),
             "scratch classes on this list should be dead");
      // Also should assert that other metadata on the list was found in handles.
      // Some cleaning remains.
      ClassLoaderDataGraph::set_should_clean_deallocate_lists();
    }
  }
}
841
// This is distinct from free_deallocate_list. For class loader data that are
// unloading, this frees the C heap memory for items on the list, and unlinks
// scratch or error classes so that unloading events aren't triggered for these
// classes. The metadata is removed with the unloading metaspace.
// There isn't C heap memory allocated for methods, so nothing is done for them.
void ClassLoaderData::free_deallocate_list_C_heap_structures() {
  assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
  assert(is_unloading(), "only called for ClassLoaderData that are unloading");
  if (_deallocate_list == NULL) {
    return;  // Nothing was ever queued for deallocation.
  }
  // Go backwards because this removes entries that are freed.
  for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
    Metadata* m = _deallocate_list->at(i);
    _deallocate_list->remove_at(i);
    if (m->is_constantPool()) {
      ((ConstantPool*)m)->release_C_heap_structures();
    } else if (m->is_klass()) {
      InstanceKlass* ik = (InstanceKlass*)m;
      // also releases ik->constants() C heap memory
      InstanceKlass::release_C_heap_structures(ik);
      // Remove the class so unloading events aren't triggered for
      // this class (scratch or error class) in do_unloading().
      remove_class(ik);
    }
    // Methods fall through: no C heap structures to release (see header comment).
  }
}
|