1 /* 2 * Copyright (c) 2002, 2020, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
 *
 */

#include "precompiled.hpp"
#include "classfile/classLoaderData.inline.hpp"
#include "classfile/classLoaderDataGraph.hpp"
#include "classfile/moduleEntry.hpp"
#include "classfile/systemDictionary.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "logging/log.hpp"
#include "logging/logTag.hpp"
#include "memory/heapInspection.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "oops/oop.inline.hpp"
#include "oops/reflectionAccessorImplKlassHelper.hpp"
#include "runtime/os.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/macros.hpp"
#include "utilities/stack.inline.hpp"

// HeapInspection

// Frees the lazily-allocated list of direct subclasses (see add_subclass()).
inline KlassInfoEntry::~KlassInfoEntry() {
  if (_subclasses != NULL) {
    delete _subclasses;
  }
}

// Appends cie to this entry's list of direct subclasses. The C-heap backing
// array (initial capacity 4) is allocated on first use only, since most
// entries never need one.
inline void KlassInfoEntry::add_subclass(KlassInfoEntry* cie) {
  if (_subclasses == NULL) {
    _subclasses = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<KlassInfoEntry*>(4, true);
  }
  _subclasses->append(cie);
}

// Comparator for histogram ordering: entries with a larger total instance
// size come first; ties are broken by class name, with array classes grouped
// before instance classes.
int KlassInfoEntry::compare(KlassInfoEntry* e1, KlassInfoEntry* e2) {
  if(e1->_instance_words > e2->_instance_words) {
    return -1;
  } else if(e1->_instance_words < e2->_instance_words) {
    return 1;
  }
  // Sort alphabetically, note 'Z' < '[' < 'a', but it's better to group
  // the array classes before all the instance classes.
  ResourceMark rm;
  const char* name1 = e1->klass()->external_name();
  const char* name2 = e2->klass()->external_name();
  bool d1 = (name1[0] == JVM_SIGNATURE_ARRAY);
  bool d2 = (name2[0] == JVM_SIGNATURE_ARRAY);
  if (d1 && !d2) {
    return -1;
  } else if (d2 && !d1) {
    return 1;
  } else {
    return strcmp(name1, name2);
  }
}

// Returns a printable name for this entry's klass. Primitive array klasses
// have no Symbol name, so they get synthetic "<...ArrayKlass>" names.
const char* KlassInfoEntry::name() const {
  const char* name;
  if (_klass->name() != NULL) {
    name = _klass->external_name();
  } else {
    if (_klass == Universe::boolArrayKlassObj())   name = "<boolArrayKlass>";   else
    if (_klass == Universe::charArrayKlassObj())   name = "<charArrayKlass>";   else
    if (_klass == Universe::floatArrayKlassObj())  name = "<floatArrayKlass>";  else
    if (_klass == Universe::doubleArrayKlassObj()) name = "<doubleArrayKlass>"; else
    if (_klass == Universe::byteArrayKlassObj())   name = "<byteArrayKlass>";   else
    if (_klass == Universe::shortArrayKlassObj())  name = "<shortArrayKlass>";  else
    if (_klass == Universe::intArrayKlassObj())    name = "<intArrayKlass>";    else
    if (_klass == Universe::longArrayKlassObj())   name = "<longArrayKlass>";   else
    name = "<no name>";
  }
  return name;
}

// Prints one histogram row: instance count, total bytes, class name, and -
// for classes in a named module - "(module@version)".
void KlassInfoEntry::print_on(outputStream* st) const {
  ResourceMark rm;

  // simplify the formatting (ILP32 vs LP64) - always cast the numbers to 64-bit
  ModuleEntry* module = _klass->module();
  if (module->is_named()) {
    st->print_cr(INT64_FORMAT_W(13) " " UINT64_FORMAT_W(13) " %s (%s@%s)",
                 (int64_t)_instance_count,
                 (uint64_t)_instance_words * HeapWordSize,
                 name(),
                 module->name()->as_C_string(),
                 module->version() != NULL ? module->version()->as_C_string() : "");
  } else {
    st->print_cr(INT64_FORMAT_W(13) " " UINT64_FORMAT_W(13) " %s",
                 (int64_t)_instance_count,
                 (uint64_t)_instance_words * HeapWordSize,
                 name());
  }
}

// Finds the entry for k in this bucket, creating one if absent.
// Returns NULL either when k has no mirror yet (an archived class that has
// not been loaded), or when C-heap allocation of a new entry fails.
KlassInfoEntry* KlassInfoBucket::lookup(Klass* const k) {
  // Can happen if k is an archived class that we haven't loaded yet.
  if (k->java_mirror_no_keepalive() == NULL) {
    return NULL;
  }

  KlassInfoEntry* elt = _list;
  while (elt != NULL) {
    if (elt->is_equal(k)) {
      return elt;
    }
    elt = elt->next();
  }
  // Not found: prepend a new entry. Use nothrow new so allocation failure
  // surfaces as NULL rather than aborting the VM.
  elt = new (std::nothrow) KlassInfoEntry(k, list());
  // We may be out of space to allocate the new entry.
  if (elt != NULL) {
    set_list(elt);
  }
  return elt;
}

// Applies cic to every entry in this bucket.
void KlassInfoBucket::iterate(KlassInfoClosure* cic) {
  KlassInfoEntry* elt = _list;
  while (elt != NULL) {
    cic->do_cinfo(elt);
    elt = elt->next();
  }
}

// Deletes all entries and leaves the bucket empty.
void KlassInfoBucket::empty() {
  KlassInfoEntry* elt = _list;
  _list = NULL;
  while (elt != NULL) {
    KlassInfoEntry* next = elt->next();
    delete elt;
    elt = next;
  }
}

// Closure over all loaded classes: looking each klass up in the table has
// the side effect of creating its KlassInfoEntry.
class KlassInfoTable::AllClassesFinder : public LockedClassesDo {
  KlassInfoTable *_table;
 public:
  AllClassesFinder(KlassInfoTable* table) : _table(table) {}
  virtual void do_klass(Klass* k) {
    // This has the SIDE EFFECT of creating a KlassInfoEntry
    // for <k>, if one doesn't exist yet.
    _table->lookup(k);
  }
};


// Constructs the hash table. Bucket storage is allocated with RETURN_NULL so
// that C-heap exhaustion leaves _buckets NULL instead of aborting; callers
// detect this via allocation_failed(). When add_all_classes is true, an
// entry is pre-created for every currently loaded class.
KlassInfoTable::KlassInfoTable(bool add_all_classes) {
  _size_of_instances_in_words = 0;
  // Reference point for the pointer-difference hash below; presumably chosen
  // as an address near other Klass objects - TODO confirm.
  _ref = (HeapWord*) Universe::boolArrayKlassObj();
  _buckets =
    (KlassInfoBucket*) AllocateHeap(sizeof(KlassInfoBucket) * _num_buckets,
                                    mtInternal, CURRENT_PC, AllocFailStrategy::RETURN_NULL);
  if (_buckets != NULL) {
    for (int index = 0; index < _num_buckets; index++) {
      _buckets[index].initialize();
    }
    if (add_all_classes) {
      AllClassesFinder finder(this);
      ClassLoaderDataGraph::classes_do(&finder);
    }
  }
}

// Empties every bucket and frees the bucket array (if it was allocated).
KlassInfoTable::~KlassInfoTable() {
  if (_buckets != NULL) {
    for (int index = 0; index < _num_buckets; index++) {
      _buckets[index].empty();
    }
    FREE_C_HEAP_ARRAY(KlassInfoBucket, _buckets);
    _buckets = NULL;
  }
}

// Hashes a Klass pointer by its distance from _ref, dropping the low two
// alignment bits.
uint KlassInfoTable::hash(const Klass* p) {
  return (uint)(((uintptr_t)p - (uintptr_t)_ref) >> 2);
}

// Finds (or creates) the entry for k. Returns NULL if a new entry could not
// be allocated, or if k is an archived class that hasn't been loaded yet.
KlassInfoEntry* KlassInfoTable::lookup(Klass* k) {
  uint idx = hash(k) % _num_buckets;
  assert(_buckets != NULL, "Allocation failure should have been caught");
  KlassInfoEntry* e = _buckets[idx].lookup(k);
  // Lookup may fail if this is a new klass for which we
  // could not allocate space for an new entry, or if it's
  // an archived class that we haven't loaded yet.
  assert(e == NULL || k == e->klass(), "must be equal");
  return e;
}

// Adds obj to the per-klass count and size statistics.
// Return false if the entry could not be recorded on account
// of running out of space required to create a new entry.
bool KlassInfoTable::record_instance(const oop obj) {
  Klass* k = obj->klass();
  KlassInfoEntry* elt = lookup(k);
  // elt may be NULL if it's a new klass for which we
  // could not allocate space for a new entry in the hashtable.
  if (elt != NULL) {
    elt->set_count(elt->count() + 1);
    elt->set_words(elt->words() + obj->size());
    _size_of_instances_in_words += obj->size();
    return true;
  } else {
    return false;
  }
}

// Applies cic to every entry in the table.
void KlassInfoTable::iterate(KlassInfoClosure* cic) {
  assert(_buckets != NULL, "Allocation failure should have been caught");
  for (int index = 0; index < _num_buckets; index++) {
    _buckets[index].iterate(cic);
  }
}

// Total size (in words) of all instances recorded so far.
size_t KlassInfoTable::size_of_instances_in_words() const {
  return _size_of_instances_in_words;
}

// Adds cie's counts into this table's entry for the same klass.
// Return false if the entry could not be recorded on account
// of running out of space required to create a new entry.
bool KlassInfoTable::merge_entry(const KlassInfoEntry* cie) {
  Klass* k = cie->klass();
  KlassInfoEntry* elt = lookup(k);
  // elt may be NULL if it's a new klass for which we
  // could not allocate space for a new entry in the hashtable.
  if (elt != NULL) {
    elt->set_count(elt->count() + cie->count());
    elt->set_words(elt->words() + cie->words());
    _size_of_instances_in_words += cie->words();
    return true;
  } else {
    return false;
  }
}

// Closure used by merge(): folds each source entry into the destination
// table, remembering whether every merge succeeded.
class KlassInfoTableMergeClosure : public KlassInfoClosure {
 private:
  KlassInfoTable* _dest;
  bool _success;
 public:
  KlassInfoTableMergeClosure(KlassInfoTable* table) : _dest(table), _success(true) {}
  void do_cinfo(KlassInfoEntry* cie) {
    _success &= _dest->merge_entry(cie);
  }
  bool is_success() { return _success; }
};

// merge from table; returns false if any entry could not be merged
// (C-heap exhaustion).
bool KlassInfoTable::merge(KlassInfoTable* table) {
  KlassInfoTableMergeClosure closure(this);
  table->iterate(&closure);
  return closure.is_success();
}

// Adapter matching GrowableArray::sort()'s element-pointer signature.
int KlassInfoHisto::sort_helper(KlassInfoEntry** e1, KlassInfoEntry** e2) {
  return (*e1)->compare(*e1,*e2);
}

// The histogram borrows its entries from cit; _elements only holds pointers.
KlassInfoHisto::KlassInfoHisto(KlassInfoTable* cit) :
  _cit(cit) {
  _elements = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<KlassInfoEntry*>(_histo_initial_size, true);
}

KlassInfoHisto::~KlassInfoHisto() {
  delete _elements;
}

void KlassInfoHisto::add(KlassInfoEntry* cie) {
  elements()->append(cie);
}

// Sorts rows by total instance size (see KlassInfoEntry::compare).
void KlassInfoHisto::sort() {
  elements()->sort(KlassInfoHisto::sort_helper);
}

// Prints every row, numbered from 1, followed by a totals line.
void KlassInfoHisto::print_elements(outputStream* st) const {
  // simplify the formatting (ILP32 vs LP64) - store the sum in 64-bit
  int64_t total = 0;
  uint64_t totalw = 0;
  for(int i=0; i < elements()->length(); i++) {
    st->print("%4d: ", i+1);
    elements()->at(i)->print_on(st);
    total += elements()->at(i)->count();
    totalw += elements()->at(i)->words();
  }
  st->print_cr("Total " INT64_FORMAT_W(13) " " UINT64_FORMAT_W(13),
               total, totalw * HeapWordSize);
}

// Collects every instance-class entry (array classes are skipped) into a
// caller-supplied array for hierarchy printing.
class HierarchyClosure : public KlassInfoClosure {
 private:
  GrowableArray<KlassInfoEntry*> *_elements;
 public:
  HierarchyClosure(GrowableArray<KlassInfoEntry*> *_elements) : _elements(_elements) {}

  void do_cinfo(KlassInfoEntry* cie) {
    // ignore array classes
    if (cie->klass()->is_instance_klass()) {
      _elements->append(cie);
    }
  }
};

// Prints the class hierarchy rooted at java.lang.Object to st. When
// classname is non-NULL, only that class, its superclasses, and (when
// print_subclasses is set) its subclasses are printed; otherwise all classes
// are printed. print_interfaces additionally lists implemented interfaces.
void KlassHierarchy::print_class_hierarchy(outputStream* st, bool print_interfaces,
                                           bool print_subclasses, char* classname) {
  ResourceMark rm;
  Stack <KlassInfoEntry*, mtClass> class_stack;
  GrowableArray<KlassInfoEntry*> elements;

  // Add all classes to the KlassInfoTable, which allows for quick lookup.
  // A KlassInfoEntry will be created for each class.
  KlassInfoTable cit(true);
  if (cit.allocation_failed()) {
    st->print_cr("ERROR: Ran out of C-heap; hierarchy not generated");
    return;
  }

  // Add all created KlassInfoEntry instances to the elements array for easy
  // iteration, and to allow each KlassInfoEntry instance to have a unique index.
  HierarchyClosure hc(&elements);
  cit.iterate(&hc);

  for(int i = 0; i < elements.length(); i++) {
    KlassInfoEntry* cie = elements.at(i);
    Klass* super = cie->klass()->super();

    // Set the index for the class.
    cie->set_index(i + 1);

    // Add the class to the subclass array of its superclass.
    if (super != NULL) {
      KlassInfoEntry* super_cie = cit.lookup(super);
      assert(super_cie != NULL, "could not lookup superclass");
      super_cie->add_subclass(cie);
    }
  }

  // Set the do_print flag for each class that should be printed.
  for(int i = 0; i < elements.length(); i++) {
    KlassInfoEntry* cie = elements.at(i);
    if (classname == NULL) {
      // We are printing all classes.
      cie->set_do_print(true);
    } else {
      // We are only printing the hierarchy of a specific class.
      if (strcmp(classname, cie->klass()->external_name()) == 0) {
        KlassHierarchy::set_do_print_for_class_hierarchy(cie, &cit, print_subclasses);
      }
    }
  }

  // Now we do a depth first traversal of the class hierarchy. The class_stack will
  // maintain the list of classes we still need to process. Start things off
  // by priming it with java.lang.Object.
  KlassInfoEntry* jlo_cie = cit.lookup(SystemDictionary::Object_klass());
  assert(jlo_cie != NULL, "could not lookup java.lang.Object");
  class_stack.push(jlo_cie);

  // Repeatedly pop the top item off the stack, print its class info,
  // and push all of its subclasses on to the stack. Do this until there
  // are no classes left on the stack.
  while (!class_stack.is_empty()) {
    KlassInfoEntry* curr_cie = class_stack.pop();
    if (curr_cie->do_print()) {
      print_class(st, curr_cie, print_interfaces);
      if (curr_cie->subclasses() != NULL) {
        // Current class has subclasses, so push all of them onto the stack.
        for (int i = 0; i < curr_cie->subclasses()->length(); i++) {
          KlassInfoEntry* cie = curr_cie->subclasses()->at(i);
          if (cie->do_print()) {
            class_stack.push(cie);
          }
        }
      }
    }
  }

  st->flush();
}

// Sets the do_print flag for every superclass and subclass of the specified class.
void KlassHierarchy::set_do_print_for_class_hierarchy(KlassInfoEntry* cie, KlassInfoTable* cit,
                                                      bool print_subclasses) {
  // Set do_print for all superclasses of this class.
  Klass* super = ((InstanceKlass*)cie->klass())->java_super();
  while (super != NULL) {
    KlassInfoEntry* super_cie = cit->lookup(super);
    super_cie->set_do_print(true);
    super = super->super();
  }

  // Set do_print for this class and all of its subclasses.
  Stack <KlassInfoEntry*, mtClass> class_stack;
  class_stack.push(cie);
  while (!class_stack.is_empty()) {
    KlassInfoEntry* curr_cie = class_stack.pop();
    curr_cie->set_do_print(true);
    if (print_subclasses && curr_cie->subclasses() != NULL) {
      // Current class has subclasses, so push all of them onto the stack.
      for (int i = 0; i < curr_cie->subclasses()->length(); i++) {
        KlassInfoEntry* cie = curr_cie->subclasses()->at(i);
        class_stack.push(cie);
      }
    }
  }
}

// Prints one '|' per indent level, separated by spaces, to draw the
// hierarchy's vertical guide lines.
static void print_indent(outputStream* st, int indent) {
  while (indent != 0) {
    st->print("|");
    indent--;
    if (indent != 0) {
      st->print(" ");
    }
  }
}

// Print the class name and its unique ClassLoader identifier.
439 static void print_classname(outputStream* st, Klass* klass) { 440 oop loader_oop = klass->class_loader_data()->class_loader(); 441 st->print("%s/", klass->external_name()); 442 if (loader_oop == NULL) { 443 st->print("null"); 444 } else { 445 st->print(INTPTR_FORMAT, p2i(klass->class_loader_data())); 446 } 447 } 448 449 static void print_interface(outputStream* st, InstanceKlass* intf_klass, const char* intf_type, int indent) { 450 print_indent(st, indent); 451 st->print(" implements "); 452 print_classname(st, intf_klass); 453 st->print(" (%s intf)\n", intf_type); 454 } 455 456 void KlassHierarchy::print_class(outputStream* st, KlassInfoEntry* cie, bool print_interfaces) { 457 ResourceMark rm; 458 InstanceKlass* klass = (InstanceKlass*)cie->klass(); 459 int indent = 0; 460 461 // Print indentation with proper indicators of superclass. 462 Klass* super = klass->super(); 463 while (super != NULL) { 464 super = super->super(); 465 indent++; 466 } 467 print_indent(st, indent); 468 if (indent != 0) st->print("--"); 469 470 // Print the class name, its unique ClassLoader identifer, and if it is an interface. 471 print_classname(st, klass); 472 if (klass->is_interface()) { 473 st->print(" (intf)"); 474 } 475 // Special treatment for generated core reflection accessor classes: print invocation target. 476 if (ReflectionAccessorImplKlassHelper::is_generated_accessor(klass)) { 477 st->print(" (invokes: "); 478 ReflectionAccessorImplKlassHelper::print_invocation_target(st, klass); 479 st->print(")"); 480 } 481 st->print("\n"); 482 483 // Print any interfaces the class has. 
484 if (print_interfaces) { 485 Array<InstanceKlass*>* local_intfs = klass->local_interfaces(); 486 Array<InstanceKlass*>* trans_intfs = klass->transitive_interfaces(); 487 for (int i = 0; i < local_intfs->length(); i++) { 488 print_interface(st, local_intfs->at(i), "declared", indent); 489 } 490 for (int i = 0; i < trans_intfs->length(); i++) { 491 InstanceKlass* trans_interface = trans_intfs->at(i); 492 // Only print transitive interfaces if they are not also declared. 493 if (!local_intfs->contains(trans_interface)) { 494 print_interface(st, trans_interface, "inherited", indent); 495 } 496 } 497 } 498 } 499 500 void KlassInfoHisto::print_histo_on(outputStream* st) { 501 st->print_cr(" num #instances #bytes class name (module)"); 502 st->print_cr("-------------------------------------------------------"); 503 print_elements(st); 504 } 505 506 class HistoClosure : public KlassInfoClosure { 507 private: 508 KlassInfoHisto* _cih; 509 public: 510 HistoClosure(KlassInfoHisto* cih) : _cih(cih) {} 511 512 void do_cinfo(KlassInfoEntry* cie) { 513 _cih->add(cie); 514 } 515 }; 516 517 class RecordInstanceClosure : public ObjectClosure { 518 private: 519 KlassInfoTable* _cit; 520 size_t _missed_count; 521 BoolObjectClosure* _filter; 522 public: 523 RecordInstanceClosure(KlassInfoTable* cit, BoolObjectClosure* filter) : 524 _cit(cit), _missed_count(0), _filter(filter) {} 525 526 void do_object(oop obj) { 527 if (should_visit(obj)) { 528 if (!_cit->record_instance(obj)) { 529 _missed_count++; 530 } 531 } 532 } 533 534 size_t missed_count() { return _missed_count; } 535 536 private: 537 bool should_visit(oop obj) { 538 return _filter == NULL || _filter->do_object_b(obj); 539 } 540 }; 541 542 void ParHeapInspectTask::work(uint worker_id) { 543 size_t missed_count = 0; 544 if (!_success) { 545 // other worker has failed on parallel iteration. 
546 return; 547 } 548 549 KlassInfoTable cit(false); 550 if (!cit.allocation_failed()) { 551 RecordInstanceClosure ric(&cit, _filter); 552 do_object_iterate_parallel(&ric, worker_id); 553 // _heap->object_iterate_parallel(&ric, worker_id, _par_thread_num); 554 missed_count = ric.missed_count(); 555 } else { 556 // fail to allocate memory, stop parallel mode 557 _success = false; 558 return; 559 } 560 { 561 MutexLocker x(&_mutex); 562 563 if (!_shared_cit->merge(&cit)) { 564 _success = false; 565 return; 566 } 567 *_shared_missed_count += missed_count; 568 } 569 } 570 571 size_t HeapInspection::populate_table(KlassInfoTable* cit, BoolObjectClosure *filter, size_t parallel_thread_num) { 572 ResourceMark rm; 573 size_t missed_count = 0; 574 bool do_serial = (parallel_thread_num == 1); 575 // try parallel first. 576 if (parallel_thread_num > 1) { 577 bool succ = Universe::heap()->run_par_heap_inspect_task(cit, filter, &missed_count, parallel_thread_num); 578 if (succ) { 579 do_serial = false; 580 } else { 581 // heap does not support parallel iteration, or parallel task fail because of native memory oom. 582 // use object_iterate. 
583 do_serial = true; 584 missed_count = 0; 585 } 586 } 587 588 if (do_serial) { 589 RecordInstanceClosure ric(cit, filter); 590 Universe::heap()->object_iterate(&ric); 591 missed_count = ric.missed_count(); 592 } 593 return missed_count; 594 } 595 596 void HeapInspection::heap_inspection(outputStream* st, size_t parallel_thread_num) { 597 ResourceMark rm; 598 599 KlassInfoTable cit(false); 600 if (!cit.allocation_failed()) { 601 size_t missed_count = 0;; 602 // populate table with object allocation info 603 missed_count = populate_table(&cit, NULL, parallel_thread_num); 604 if (missed_count != 0) { 605 log_info(gc, classhisto)("WARNING: Ran out of C-heap; undercounted " SIZE_FORMAT 606 " total instances in data below", 607 missed_count); 608 } 609 610 // Sort and print klass instance info 611 KlassInfoHisto histo(&cit); 612 HistoClosure hc(&histo); 613 614 cit.iterate(&hc); 615 616 histo.sort(); 617 histo.print_histo_on(st); 618 } else { 619 st->print_cr("ERROR: Ran out of C-heap; histogram not generated"); 620 } 621 st->flush(); 622 } 623 624 class FindInstanceClosure : public ObjectClosure { 625 private: 626 Klass* _klass; 627 GrowableArray<oop>* _result; 628 629 public: 630 FindInstanceClosure(Klass* k, GrowableArray<oop>* result) : _klass(k), _result(result) {}; 631 632 void do_object(oop obj) { 633 if (obj->is_a(_klass)) { 634 // obj was read with AS_NO_KEEPALIVE, or equivalent. 635 // The object needs to be kept alive when it is published. 
636 Universe::heap()->keep_alive(obj); 637 638 _result->append(obj); 639 } 640 } 641 }; 642 643 void HeapInspection::find_instances_at_safepoint(Klass* k, GrowableArray<oop>* result) { 644 assert(SafepointSynchronize::is_at_safepoint(), "all threads are stopped"); 645 assert(Heap_lock->is_locked(), "should have the Heap_lock"); 646 647 // Ensure that the heap is parsable 648 Universe::heap()->ensure_parsability(false); // no need to retire TALBs 649 650 // Iterate over objects in the heap 651 FindInstanceClosure fic(k, result); 652 Universe::heap()->object_iterate(&fic); 653 }