1 /* 2 * Copyright (c) 2003, 2019, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "classfile/classLoaderDataGraph.hpp" 27 #include "classfile/javaClasses.inline.hpp" 28 #include "classfile/symbolTable.hpp" 29 #include "classfile/systemDictionary.hpp" 30 #include "classfile/vmSymbols.hpp" 31 #include "jvmtifiles/jvmtiEnv.hpp" 32 #include "logging/log.hpp" 33 #include "memory/allocation.inline.hpp" 34 #include "memory/resourceArea.hpp" 35 #include "memory/universe.hpp" 36 #include "oops/access.inline.hpp" 37 #include "oops/arrayOop.inline.hpp" 38 #include "oops/constantPool.inline.hpp" 39 #include "oops/instanceMirrorKlass.hpp" 40 #include "oops/objArrayKlass.hpp" 41 #include "oops/objArrayOop.inline.hpp" 42 #include "oops/oop.inline.hpp" 43 #include "oops/typeArrayOop.inline.hpp" 44 #include "prims/jvmtiEventController.hpp" 45 #include "prims/jvmtiEventController.inline.hpp" 46 #include "prims/jvmtiExport.hpp" 47 #include "prims/jvmtiImpl.hpp" 48 #include "prims/jvmtiTagMap.hpp" 49 #include "runtime/biasedLocking.hpp" 50 #include "runtime/frame.inline.hpp" 51 #include "runtime/handles.inline.hpp" 52 #include "runtime/javaCalls.hpp" 53 #include "runtime/jniHandles.inline.hpp" 54 #include "runtime/mutex.hpp" 55 #include "runtime/mutexLocker.hpp" 56 #include "runtime/reflectionUtils.hpp" 57 #include "runtime/thread.inline.hpp" 58 #include "runtime/threadSMR.hpp" 59 #include "runtime/vframe.hpp" 60 #include "runtime/vmThread.hpp" 61 #include "runtime/vmOperations.hpp" 62 #include "utilities/macros.hpp" 63 #if INCLUDE_ZGC 64 #include "gc/z/zGlobals.hpp" 65 #endif 66 67 // JvmtiTagHashmapEntry 68 // 69 // Each entry encapsulates a reference to the tagged object 70 // and the tag value. In addition an entry includes a next pointer which 71 // is used to chain entries together. 
class JvmtiTagHashmapEntry : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;

  oop _object;                  // tagged object
  jlong _tag;                   // the tag
  JvmtiTagHashmapEntry* _next;  // next on the list

  // (re)initialize this entry - also used when an entry is recycled
  // from the tag map's free list
  inline void init(oop object, jlong tag) {
    _object = object;
    _tag = tag;
    _next = NULL;
  }

  // constructor
  JvmtiTagHashmapEntry(oop object, jlong tag) { init(object, tag); }

 public:

  // accessor methods
  inline oop* object_addr() { return &_object; }
  // Load the referent through a phantom-reference barrier: this keeps the
  // object alive for the caller.
  inline oop object() { return NativeAccess<ON_PHANTOM_OOP_REF>::oop_load(object_addr()); }
  // Peek at the object without keeping it alive. The returned object must be
  // kept alive using a normal access if it leaks out of a thread transition from VM.
  inline oop object_peek() {
    return NativeAccess<ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE>::oop_load(object_addr());
  }
  inline jlong tag() const { return _tag; }

  inline void set_tag(jlong tag) {
    // tag value 0 means "untagged" - such entries are removed rather than updated
    assert(tag != 0, "can't be zero");
    _tag = tag;
  }

  // identity comparison against the stored referent; uses the no-keepalive
  // peek since the caller already holds a live reference to 'object'
  inline bool equals(oop object) {
    return object == object_peek();
  }

  inline JvmtiTagHashmapEntry* next() const { return _next; }
  inline void set_next(JvmtiTagHashmapEntry* next) { _next = next; }
};


// JvmtiTagHashmap
//
// A hashmap is essentially a table of pointers to entries. Entries
// are hashed to a location, or position in the table, and then
// chained from that location. The "key" for hashing is address of
// the object, or oop. The "value" is the tag value.
//
// A hashmap maintains a count of the number entries in the hashmap
// and resizes if the number of entries exceeds a given threshold.
// The threshold is specified as a percentage of the size - for
// example a threshold of 0.75 will trigger the hashmap to resize
// if the number of entries is >75% of table size.
//
// A hashmap provides functions for adding, removing, and finding
// entries. It also provides a function to iterate over all entries
// in the hashmap.

class JvmtiTagHashmap : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;

  // thresholds at which a memory-usage trace message is emitted; the
  // increment grows as the map grows (see compute_next_trace_threshold)
  enum {
    small_trace_threshold = 10000,    // threshold for tracing
    medium_trace_threshold = 100000,
    large_trace_threshold = 1000000,
    initial_trace_threshold = small_trace_threshold
  };

  static int _sizes[];          // array of possible hashmap sizes
  int _size;                    // actual size of the table
  int _size_index;              // index into size table

  int _entry_count;             // number of entries in the hashmap

  float _load_factor;           // load factor as a % of the size
  int _resize_threshold;        // computed threshold to trigger resizing.
  bool _resizing_enabled;       // indicates if hashmap can resize

  int _trace_threshold;         // threshold for trace messages

  JvmtiTagHashmapEntry** _table;  // the table of entries.

  // private accessors
  int resize_threshold() const { return _resize_threshold; }
  int trace_threshold() const { return _trace_threshold; }

  // initialize the hashmap: picks the table size from _sizes[size_index],
  // allocates and zeroes the bucket array on the C heap. Exits the VM if
  // even the initial allocation fails.
  void init(int size_index=0, float load_factor=4.0f) {
    int initial_size = _sizes[size_index];
    _size_index = size_index;
    _size = initial_size;
    _entry_count = 0;
    _trace_threshold = initial_trace_threshold;
    _load_factor = load_factor;
    _resize_threshold = (int)(_load_factor * _size);
    _resizing_enabled = true;
    size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
    _table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (_table == NULL) {
      vm_exit_out_of_memory(s, OOM_MALLOC_ERROR,
        "unable to allocate initial hashtable for jvmti object tags");
    }
    for (int i=0; i<initial_size; i++) {
      _table[i] = NULL;
    }
  }

  // hash a given key (oop) with the specified size
  static unsigned int hash(oop key, int size) {
    // resolve to a stable address before hashing (required for GCs that
    // may have multiple copies of the object, e.g. during relocation)
    const oop obj = Access<>::resolve(key);
    const unsigned int hash = Universe::heap()->hash_oop(obj);
    return hash % size;
  }

  // hash a given key (oop)
  unsigned int hash(oop key) {
    return hash(key, _size);
  }

  // resize the hashmap - allocates a large table and re-hashes
  // all entries into the new table.
  void resize() {
    int new_size_index = _size_index+1;
    int new_size = _sizes[new_size_index];
    if (new_size < 0) {
      // hashmap already at maximum capacity (_sizes is -1 terminated)
      return;
    }

    // allocate new table
    size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
    JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (new_table == NULL) {
      // resizing is best-effort: keep the current table and stop trying
      warning("unable to allocate larger hashtable for jvmti object tags");
      set_resizing_enabled(false);
      return;
    }

    // initialize new table
    int i;
    for (i=0; i<new_size; i++) {
      new_table[i] = NULL;
    }

    // rehash all entries into the new table
    for (i=0; i<_size; i++) {
      JvmtiTagHashmapEntry* entry = _table[i];
      while (entry != NULL) {
        JvmtiTagHashmapEntry* next = entry->next();
        oop key = entry->object_peek();
        assert(key != NULL, "jni weak reference cleared!!");
        unsigned int h = hash(key, new_size);
        JvmtiTagHashmapEntry* anchor = new_table[h];
        if (anchor == NULL) {
          new_table[h] = entry;
          entry->set_next(NULL);
        } else {
          // prepend to the existing chain
          entry->set_next(anchor);
          new_table[h] = entry;
        }
        entry = next;
      }
    }

    // free old table and update settings.
    os::free((void*)_table);
    _table = new_table;
    _size_index = new_size_index;
    _size = new_size;

    // compute new resize threshold
    _resize_threshold = (int)(_load_factor * _size);
  }


  // internal remove function - remove an entry at a given position in the
  // table. 'prev' is the predecessor of 'entry' in bucket 'pos' (NULL if
  // 'entry' is the bucket head). Does not free the entry.
  inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
    assert(pos >= 0 && pos < _size, "out of range");
    if (prev == NULL) {
      _table[pos] = entry->next();
    } else {
      prev->set_next(entry->next());
    }
    assert(_entry_count > 0, "checking");
    _entry_count--;
  }

  // resizing switch
  bool is_resizing_enabled() const { return _resizing_enabled; }
  void set_resizing_enabled(bool enable) { _resizing_enabled = enable; }

  // debugging
  void print_memory_usage();
  void compute_next_trace_threshold();

 public:

  // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
  // The preferred size is rounded down to an actual size.
  JvmtiTagHashmap(int size, float load_factor=0.0f) {
    // find the first candidate size >= the requested size; fall back to
    // the largest available size if the request exceeds all candidates
    int i=0;
    while (_sizes[i] < size) {
      if (_sizes[i] < 0) {
        assert(i > 0, "sanity check");
        i--;
        break;
      }
      i++;
    }

    // if a load factor is specified then use it, otherwise use default
    if (load_factor > 0.01f) {
      init(i, load_factor);
    } else {
      init(i);
    }
  }

  // create a JvmtiTagHashmap with default settings
  JvmtiTagHashmap() {
    init();
  }

  // release table when JvmtiTagHashmap destroyed
  ~JvmtiTagHashmap() {
    if (_table != NULL) {
      os::free((void*)_table);
      _table = NULL;
    }
  }

  // accessors
  int size() const { return _size; }
  JvmtiTagHashmapEntry** table() const { return _table; }
  int entry_count() const { return _entry_count; }

  // find an entry in the hashmap, returns NULL if not found.
  inline JvmtiTagHashmapEntry* find(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    while (entry != NULL) {
      if (entry->equals(key)) {
        return entry;
      }
      entry = entry->next();
    }
    return NULL;
  }


  // add a new entry to hashmap
  inline void add(oop key, JvmtiTagHashmapEntry* entry) {
    assert(key != NULL, "checking");
    assert(find(key) == NULL, "duplicate detected");
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* anchor = _table[h];
    if (anchor == NULL) {
      _table[h] = entry;
      entry->set_next(NULL);
    } else {
      // prepend to the existing chain
      entry->set_next(anchor);
      _table[h] = entry;
    }

    _entry_count++;
    if (log_is_enabled(Debug, jvmti, objecttagging) && entry_count() >= trace_threshold()) {
      print_memory_usage();
      compute_next_trace_threshold();
    }

    // if the number of entries exceed the threshold then resize
    if (entry_count() > resize_threshold() && is_resizing_enabled()) {
      resize();
    }
  }

  // remove an entry with the given key. Unlinks the entry from its bucket
  // and returns it (caller owns it); returns NULL if not found.
  inline JvmtiTagHashmapEntry* remove(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    JvmtiTagHashmapEntry* prev = NULL;
    while (entry != NULL) {
      if (entry->equals(key)) {
        break;
      }
      prev = entry;
      entry = entry->next();
    }
    if (entry != NULL) {
      remove(prev, h, entry);
    }
    return entry;
  }

  // iterate over all entries in the hashmap
  void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
};

// possible hashmap sizes - odd primes that roughly double in size.
// To avoid excessive resizing the odd primes from 4801-76831 and
// 76831-307261 have been removed. The list must be terminated by -1.
378 int JvmtiTagHashmap::_sizes[] = { 4801, 76831, 307261, 614563, 1228891, 379 2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 }; 380 381 382 // A supporting class for iterating over all entries in Hashmap 383 class JvmtiTagHashmapEntryClosure { 384 public: 385 virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0; 386 }; 387 388 389 // iterate over all entries in the hashmap 390 void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) { 391 for (int i=0; i<_size; i++) { 392 JvmtiTagHashmapEntry* entry = _table[i]; 393 JvmtiTagHashmapEntry* prev = NULL; 394 while (entry != NULL) { 395 // obtain the next entry before invoking do_entry - this is 396 // necessary because do_entry may remove the entry from the 397 // hashmap. 398 JvmtiTagHashmapEntry* next = entry->next(); 399 closure->do_entry(entry); 400 entry = next; 401 } 402 } 403 } 404 405 // debugging 406 void JvmtiTagHashmap::print_memory_usage() { 407 intptr_t p = (intptr_t)this; 408 tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p); 409 410 // table + entries in KB 411 int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) + 412 entry_count()*sizeof(JvmtiTagHashmapEntry))/K; 413 414 int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K); 415 tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]", 416 entry_count(), hashmap_usage, weak_globals_usage); 417 } 418 419 // compute threshold for the next trace message 420 void JvmtiTagHashmap::compute_next_trace_threshold() { 421 _trace_threshold = entry_count(); 422 if (trace_threshold() < medium_trace_threshold) { 423 _trace_threshold += small_trace_threshold; 424 } else { 425 if (trace_threshold() < large_trace_threshold) { 426 _trace_threshold += medium_trace_threshold; 427 } else { 428 _trace_threshold += large_trace_threshold; 429 } 430 } 431 } 432 433 // create a JvmtiTagMap 434 JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) : 435 _env(env), 436 _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false), 
437 _free_entries(NULL), 438 _free_entries_count(0) 439 { 440 assert(JvmtiThreadState_lock->is_locked(), "sanity check"); 441 assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment"); 442 443 _hashmap = new JvmtiTagHashmap(); 444 445 // finally add us to the environment 446 ((JvmtiEnvBase *)env)->release_set_tag_map(this); 447 } 448 449 450 // destroy a JvmtiTagMap 451 JvmtiTagMap::~JvmtiTagMap() { 452 453 // no lock acquired as we assume the enclosing environment is 454 // also being destroryed. 455 ((JvmtiEnvBase *)_env)->set_tag_map(NULL); 456 457 JvmtiTagHashmapEntry** table = _hashmap->table(); 458 for (int j = 0; j < _hashmap->size(); j++) { 459 JvmtiTagHashmapEntry* entry = table[j]; 460 while (entry != NULL) { 461 JvmtiTagHashmapEntry* next = entry->next(); 462 delete entry; 463 entry = next; 464 } 465 } 466 467 // finally destroy the hashmap 468 delete _hashmap; 469 _hashmap = NULL; 470 471 // remove any entries on the free list 472 JvmtiTagHashmapEntry* entry = _free_entries; 473 while (entry != NULL) { 474 JvmtiTagHashmapEntry* next = entry->next(); 475 delete entry; 476 entry = next; 477 } 478 _free_entries = NULL; 479 } 480 481 // create a hashmap entry 482 // - if there's an entry on the (per-environment) free list then this 483 // is returned. Otherwise an new entry is allocated. 
JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(oop ref, jlong tag) {
  // must be single-threaded: either at a VM operation or holding the tag map lock
  assert(Thread::current()->is_VM_thread() || is_locked(), "checking");
  JvmtiTagHashmapEntry* entry;
  if (_free_entries == NULL) {
    entry = new JvmtiTagHashmapEntry(ref, tag);
  } else {
    // recycle an entry from the free list
    assert(_free_entries_count > 0, "mismatched _free_entries_count");
    _free_entries_count--;
    entry = _free_entries;
    _free_entries = entry->next();
    entry->init(ref, tag);
  }
  return entry;
}

// destroy an entry by returning it to the free list
void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
  assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
  // limit the size of the free list
  if (_free_entries_count >= max_free_entries) {
    delete entry;
  } else {
    // push onto the free list for later reuse by create_entry
    entry->set_next(_free_entries);
    _free_entries = entry;
    _free_entries_count++;
  }
}

// returns the tag map for the given environments. If the tag map
// doesn't exist then it is created.
JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
  // acquire-load pairs with the release store in the JvmtiTagMap constructor
  JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map_acquire();
  if (tag_map == NULL) {
    // re-check under JvmtiThreadState_lock so only one thread creates the map
    MutexLocker mu(JvmtiThreadState_lock);
    tag_map = ((JvmtiEnvBase*)env)->tag_map();
    if (tag_map == NULL) {
      tag_map = new JvmtiTagMap(env);
    }
  } else {
    DEBUG_ONLY(Thread::current()->check_possible_safepoint());
  }
  return tag_map;
}

// iterate over all entries in the tag map.
529 void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) { 530 hashmap()->entry_iterate(closure); 531 } 532 533 // returns true if the hashmaps are empty 534 bool JvmtiTagMap::is_empty() { 535 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking"); 536 return hashmap()->entry_count() == 0; 537 } 538 539 540 // Return the tag value for an object, or 0 if the object is 541 // not tagged 542 // 543 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) { 544 JvmtiTagHashmapEntry* entry = tag_map->hashmap()->find(o); 545 if (entry == NULL) { 546 return 0; 547 } else { 548 return entry->tag(); 549 } 550 } 551 552 553 // A CallbackWrapper is a support class for querying and tagging an object 554 // around a callback to a profiler. The constructor does pre-callback 555 // work to get the tag value, klass tag value, ... and the destructor 556 // does the post-callback work of tagging or untagging the object. 557 // 558 // { 559 // CallbackWrapper wrapper(tag_map, o); 560 // 561 // (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...) 562 // 563 // } // wrapper goes out of scope here which results in the destructor 564 // checking to see if the object has been tagged, untagged, or the 565 // tag value has changed. 
//
class CallbackWrapper : public StackObj {
 private:
  JvmtiTagMap* _tag_map;
  JvmtiTagHashmap* _hashmap;
  JvmtiTagHashmapEntry* _entry;   // pre-callback entry for _o (NULL if untagged)
  oop _o;
  jlong _obj_size;
  jlong _obj_tag;                 // tag value exposed to (and mutable by) the callback
  jlong _klass_tag;

 protected:
  JvmtiTagMap* tag_map() const { return _tag_map; }

  // invoked post-callback to tag, untag, or update the tag of an object
  void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
                                       JvmtiTagHashmapEntry* entry, jlong obj_tag);
 public:
  CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
    assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
           "MT unsafe or must be VM thread");

    // object to tag
    _o = o;

    // object size (in bytes)
    _obj_size = (jlong)_o->size() * wordSize;

    // record the context
    _tag_map = tag_map;
    _hashmap = tag_map->hashmap();
    _entry = _hashmap->find(_o);

    // get object tag
    _obj_tag = (_entry == NULL) ? 0 : _entry->tag();

    // get the class and the class's tag value
    assert(SystemDictionary::Class_klass()->is_mirror_instance_klass(), "Is not?");

    _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
  }

  ~CallbackWrapper() {
    // apply any tag change the callback made via obj_tag_p()
    post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
  }

  inline jlong* obj_tag_p() { return &_obj_tag; }
  inline jlong obj_size() const { return _obj_size; }
  inline jlong obj_tag() const { return _obj_tag; }
  inline jlong klass_tag() const { return _klass_tag; }
};



// callback post-callback to tag, untag, or update the tag of an object
void inline CallbackWrapper::post_callback_tag_update(oop o,
                                                      JvmtiTagHashmap* hashmap,
                                                      JvmtiTagHashmapEntry* entry,
                                                      jlong obj_tag) {
  if (entry == NULL) {
    if (obj_tag != 0) {
      // callback has tagged the object
      assert(Thread::current()->is_VM_thread(), "must be VMThread");
      entry = tag_map()->create_entry(o, obj_tag);
      hashmap->add(o, entry);
    }
  } else {
    // object was previously tagged - the callback may have untagged
    // the object or changed the tag value
    if (obj_tag == 0) {

      JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
      assert(entry_removed == entry, "checking");
      tag_map()->destroy_entry(entry);

    } else {
      if (obj_tag != entry->tag()) {
         entry->set_tag(obj_tag);
      }
    }
  }
}

// An extended CallbackWrapper used when reporting an object reference
// to the agent.
//
// {
//   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
//
//   (*callback)(wrapper.klass_tag(),
//               wrapper.obj_size(),
//               wrapper.obj_tag_p()
//               wrapper.referrer_tag_p(), ...)
//
// } // wrapper goes out of scope here which results in the destructor
//      checking to see if the referrer object has been tagged, untagged,
//      or the tag value has changed.
//
class TwoOopCallbackWrapper : public CallbackWrapper {
 private:
  bool _is_reference_to_self;           // true when referrer == o
  JvmtiTagHashmap* _referrer_hashmap;
  JvmtiTagHashmapEntry* _referrer_entry;
  oop _referrer;
  jlong _referrer_obj_tag;
  jlong _referrer_klass_tag;
  jlong* _referrer_tag_p;

  bool is_reference_to_self() const { return _is_reference_to_self; }

 public:
  TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
    CallbackWrapper(tag_map, o)
  {
    // self reference needs to be handled in a special way: the base class
    // already tracks 'o', so alias the referrer state to it
    _is_reference_to_self = (referrer == o);

    if (_is_reference_to_self) {
      _referrer_klass_tag = klass_tag();
      _referrer_tag_p = obj_tag_p();
    } else {
      _referrer = referrer;
      // record the context
      _referrer_hashmap = tag_map->hashmap();
      _referrer_entry = _referrer_hashmap->find(_referrer);

      // get object tag
      _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
      _referrer_tag_p = &_referrer_obj_tag;

      // get referrer class tag.
      _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
    }
  }

  ~TwoOopCallbackWrapper() {
    if (!is_reference_to_self()){
      // apply any tag change the callback made to the referrer; for a self
      // reference the base class destructor already handles it
      post_callback_tag_update(_referrer,
                               _referrer_hashmap,
                               _referrer_entry,
                               _referrer_obj_tag);
    }
  }

  // address of referrer tag
  // (for a self reference this will return the same thing as obj_tag_p())
  inline jlong* referrer_tag_p() { return _referrer_tag_p; }

  // referrer's class tag
  inline jlong referrer_klass_tag() { return _referrer_klass_tag; }
};

// tag an object
//
// This function is performance critical. If many threads attempt to tag objects
// around the same time then it's possible that the Mutex associated with the
// tag map will be a hot lock.
void JvmtiTagMap::set_tag(jobject object, jlong tag) {
  MutexLocker ml(lock());

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  // see if the object is already tagged
  JvmtiTagHashmap* hashmap = _hashmap;
  JvmtiTagHashmapEntry* entry = hashmap->find(o);

  // if the object is not already tagged then we tag it
  if (entry == NULL) {
    if (tag != 0) {
      entry = create_entry(o, tag);
      hashmap->add(o, entry);
    } else {
      // no-op: untagging an object that isn't tagged
    }
  } else {
    // if the object is already tagged then we either update
    // the tag (if a new tag value has been provided)
    // or remove the object if the new tag value is 0.
    if (tag == 0) {
      hashmap->remove(o);
      destroy_entry(entry);
    } else {
      entry->set_tag(tag);
    }
  }
}

// get the tag for an object
jlong JvmtiTagMap::get_tag(jobject object) {
  MutexLocker ml(lock());

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  return tag_for(this, o);
}


// Helper class used to describe the static or instance fields of a class.
// For each field it holds the field index (as defined by the JVMTI specification),
// the field type, and the offset.

class ClassFieldDescriptor: public CHeapObj<mtInternal> {
 private:
  int _field_index;
  int _field_offset;
  char _field_type;
 public:
  ClassFieldDescriptor(int index, char type, int offset) :
    _field_index(index), _field_offset(offset), _field_type(type) {
  }
  int field_index()  const { return _field_index; }
  char field_type()  const { return _field_type; }
  int field_offset() const { return _field_offset; }
};

class ClassFieldMap: public CHeapObj<mtInternal> {
 private:
  enum {
    initial_field_count = 5   // initial capacity of the descriptor list
  };

  // list of field descriptors
  GrowableArray<ClassFieldDescriptor*>* _fields;

  // constructor
  ClassFieldMap();

  // add a field
  void add(int index, char type, int offset);

  // returns the field count for the given class
  static int compute_field_count(InstanceKlass* ik);

 public:
  ~ClassFieldMap();

  // access
  int field_count()                     { return _fields->length(); }
  ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }

  // functions to create maps of static or instance fields
  static ClassFieldMap* create_map_of_static_fields(Klass* k);
  static ClassFieldMap* create_map_of_instance_fields(oop obj);
};

ClassFieldMap::ClassFieldMap() {
  _fields = new (ResourceObj::C_HEAP, mtInternal)
    GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
}

ClassFieldMap::~ClassFieldMap() {
  // the map owns its descriptors
  for (int i=0; i<_fields->length(); i++) {
    delete _fields->at(i);
  }
  delete _fields;
}

void ClassFieldMap::add(int index, char type, int offset) {
  ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
  _fields->append(field);
}

// Returns a heap allocated ClassFieldMap to describe the static fields
// of the given class.
//
ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
  HandleMark hm;
  InstanceKlass* ik = InstanceKlass::cast(k);

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  // field_count() over the locally-declared fields establishes the highest
  // JVMTI field index; the iteration below then counts down from it
  FilteredFieldStream f(ik, false, false);
  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, true, true); !fld.eos(); fld.next(), index++) {
    // ignore instance fields
    if (!fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->char_at(0), fld.offset());
  }
  return field_map;
}

// Returns a heap allocated ClassFieldMap to describe the instance fields
// of the given class. All instance fields are included (this means public
// and private fields declared in superclasses and superinterfaces too).
//
ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
  HandleMark hm;
  InstanceKlass* ik = InstanceKlass::cast(obj->klass());

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  FilteredFieldStream f(ik, false, false);

  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, false, false); !fld.eos(); fld.next(), index++) {
    // ignore static fields
    if (fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->char_at(0), fld.offset());
  }

  return field_map;
}

// Helper class used to cache a ClassFieldMap for the instance fields of
// a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
// heap iteration and avoid creating a field map for each object in the heap
// (only need to create the map when the first instance of a class is encountered).
//
class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
 private:
  enum {
    initial_class_count = 200   // initial capacity of the class list
  };
  ClassFieldMap* _field_map;    // owned; deleted with this cache entry

  ClassFieldMap* field_map() const { return _field_map; }

  JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
  ~JvmtiCachedClassFieldMap();

  // all InstanceKlasses that currently cache a field map; used by
  // clear_cache() to find and release them
  static GrowableArray<InstanceKlass*>* _class_list;
  static void add_to_class_list(InstanceKlass* ik);

 public:
  // returns the field map for a given object (returning map cached
  // by InstanceKlass if possible)
  static ClassFieldMap* get_map_of_instance_fields(oop obj);

  // removes the field map from all instanceKlasses - should be
  // called before VM operation completes
  static void clear_cache();

  // returns the number of ClassFieldMap cached by instanceKlasses
  static int cached_field_map_count();
};

GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;

JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
  _field_map = field_map;
}

JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
  if (_field_map != NULL) {
    delete _field_map;
  }
}

// Marker class to ensure that the class file map cache is only used in a defined
// scope.
class ClassFieldMapCacheMark : public StackObj {
 private:
   static bool _is_active;
 public:
   ClassFieldMapCacheMark() {
     assert(Thread::current()->is_VM_thread(), "must be VMThread");
     assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
     assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
     _is_active = true;
   }
   ~ClassFieldMapCacheMark() {
     // releases every cached field map when the scope ends
     JvmtiCachedClassFieldMap::clear_cache();
     _is_active = false;
   }
   static bool is_active() { return _is_active; }
};

bool ClassFieldMapCacheMark::_is_active;


// record that the given InstanceKlass is caching a field map
void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
  if (_class_list == NULL) {
    _class_list = new (ResourceObj::C_HEAP, mtInternal)
      GrowableArray<InstanceKlass*>(initial_class_count, true);
  }
  _class_list->push(ik);
}

// returns the instance field map for the given object
// (returns field map cached by the InstanceKlass if possible)
ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");

  Klass* k = obj->klass();
  InstanceKlass* ik = InstanceKlass::cast(k);

  // return cached map if possible
  JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
  if (cached_map != NULL) {
    assert(cached_map->field_map() != NULL, "missing field list");
    return cached_map->field_map();
  } else {
    // first instance of this class seen - build the map and cache it
    // on the InstanceKlass for subsequent instances
    ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
    cached_map = new JvmtiCachedClassFieldMap(field_map);
    ik->set_jvmti_cached_class_field_map(cached_map);
    add_to_class_list(ik);
    return field_map;
  }
}

// remove the fields maps cached from all instanceKlasses
void
JvmtiCachedClassFieldMap::clear_cache() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  if (_class_list != NULL) {
    for (int i = 0; i < _class_list->length(); i++) {
      InstanceKlass* ik = _class_list->at(i);
      JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
      assert(cached_map != NULL, "should not be NULL");
      ik->set_jvmti_cached_class_field_map(NULL);
      delete cached_map;  // deletes the encapsulated field map
    }
    delete _class_list;
    _class_list = NULL;
  }
}

// returns the number of ClassFieldMap cached by instanceKlasses
int JvmtiCachedClassFieldMap::cached_field_map_count() {
  return (_class_list == NULL) ? 0 : _class_list->length();
}

// helper function to indicate if an object is filtered by its tag or class tag
static inline bool is_filtered_by_heap_filter(jlong obj_tag,
                                              jlong klass_tag,
                                              int heap_filter) {
  // apply the heap filter
  if (obj_tag != 0) {
    // filter out tagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
  } else {
    // filter out untagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
  }
  if (klass_tag != 0) {
    // filter out objects with tagged classes
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
  } else {
    // filter out objects with untagged classes.
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
  }
  return false;
}

// helper function to indicate if an object is filtered by a klass filter
static inline bool is_filtered_by_klass_filter(oop obj, Klass* klass_filter) {
  if (klass_filter != NULL) {
    if (obj->klass() != klass_filter) {
      return true;
    }
  }
  return false;
}

// helper function to tell if a field is a primitive field or not
static inline bool is_primitive_field_type(char type) {
  return (type != JVM_SIGNATURE_CLASS && type != JVM_SIGNATURE_ARRAY);
}

// helper function to copy the value from location addr to jvalue.
static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
  switch (value_type) {
    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
    case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
    default: ShouldNotReachHere();
  }
}

// helper function to invoke string primitive value callback
// returns visit control flags
static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
                                         CallbackWrapper* wrapper,
                                         oop str,
                                         void* user_data)
{
  assert(str->klass() == SystemDictionary::String_klass(), "not a string");

  typeArrayOop s_value = java_lang_String::value(str);

  // JDK-6584008: the value field may be null if a String instance is
  // partially constructed.
  if (s_value == NULL) {
    return 0;
  }
  // get the string value and length
  // (string value may be offset from the base)
  int s_len = java_lang_String::length(str);
  bool is_latin1 = java_lang_String::is_latin1(str);
  jchar* value;
  if (s_len > 0) {
    if (!is_latin1) {
      value = s_value->char_at_addr(0);
    } else {
      // Inflate latin1 encoded string to UTF16; the temporary buffer is
      // freed after the callback returns
      jchar* buf = NEW_C_HEAP_ARRAY(jchar, s_len, mtInternal);
      for (int i = 0; i < s_len; i++) {
        buf[i] = ((jchar) s_value->byte_at(i)) & 0xff;
      }
      value = &buf[0];
    }
  } else {
    // Don't use char_at_addr(0) if length is 0
    value = (jchar*) s_value->base(T_CHAR);
  }

  // invoke the callback
  jint res = (*cb)(wrapper->klass_tag(),
                   wrapper->obj_size(),
                   wrapper->obj_tag_p(),
                   value,
                   (jint)s_len,
                   user_data);

  if (is_latin1 && s_len > 0) {
    FREE_C_HEAP_ARRAY(jchar, value);
  }
  return res;
}

// helper function to invoke array primitive value callback
// returns visit control flags
static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
                                                  CallbackWrapper* wrapper,
                                                  oop obj,
                                                  void* user_data)
{
  assert(obj->is_typeArray(), "not a primitive array");

  // get base address of first element
  typeArrayOop array = typeArrayOop(obj);
  BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
  void* elements = array->base(type);

  // jvmtiPrimitiveType is defined so this mapping is always correct
  jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);

  return (*cb)(wrapper->klass_tag(),
               wrapper->obj_size(),
               wrapper->obj_tag_p(),
               (jint)array->length(),
               elem_type,
               elements,
               user_data);
}

// helper function to invoke the primitive field callback for all static fields
// of a given class
static
jint invoke_primitive_field_callback_for_static_fields 1133 (CallbackWrapper* wrapper, 1134 oop obj, 1135 jvmtiPrimitiveFieldCallback cb, 1136 void* user_data) 1137 { 1138 // for static fields only the index will be set 1139 static jvmtiHeapReferenceInfo reference_info = { 0 }; 1140 1141 assert(obj->klass() == SystemDictionary::Class_klass(), "not a class"); 1142 if (java_lang_Class::is_primitive(obj)) { 1143 return 0; 1144 } 1145 Klass* klass = java_lang_Class::as_Klass(obj); 1146 1147 // ignore classes for object and type arrays 1148 if (!klass->is_instance_klass()) { 1149 return 0; 1150 } 1151 1152 // ignore classes which aren't linked yet 1153 InstanceKlass* ik = InstanceKlass::cast(klass); 1154 if (!ik->is_linked()) { 1155 return 0; 1156 } 1157 1158 // get the field map 1159 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass); 1160 1161 // invoke the callback for each static primitive field 1162 for (int i=0; i<field_map->field_count(); i++) { 1163 ClassFieldDescriptor* field = field_map->field_at(i); 1164 1165 // ignore non-primitive fields 1166 char type = field->field_type(); 1167 if (!is_primitive_field_type(type)) { 1168 continue; 1169 } 1170 // one-to-one mapping 1171 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type; 1172 1173 // get offset and field value 1174 int offset = field->field_offset(); 1175 address addr = (address)klass->java_mirror() + offset; 1176 jvalue value; 1177 copy_to_jvalue(&value, addr, value_type); 1178 1179 // field index 1180 reference_info.field.index = field->field_index(); 1181 1182 // invoke the callback 1183 jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD, 1184 &reference_info, 1185 wrapper->klass_tag(), 1186 wrapper->obj_tag_p(), 1187 value, 1188 value_type, 1189 user_data); 1190 if (res & JVMTI_VISIT_ABORT) { 1191 delete field_map; 1192 return res; 1193 } 1194 } 1195 1196 delete field_map; 1197 return 0; 1198 } 1199 1200 // helper function to invoke the primitive field callback for all 
instance fields 1201 // of a given object 1202 static jint invoke_primitive_field_callback_for_instance_fields( 1203 CallbackWrapper* wrapper, 1204 oop obj, 1205 jvmtiPrimitiveFieldCallback cb, 1206 void* user_data) 1207 { 1208 // for instance fields only the index will be set 1209 static jvmtiHeapReferenceInfo reference_info = { 0 }; 1210 1211 // get the map of the instance fields 1212 ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj); 1213 1214 // invoke the callback for each instance primitive field 1215 for (int i=0; i<fields->field_count(); i++) { 1216 ClassFieldDescriptor* field = fields->field_at(i); 1217 1218 // ignore non-primitive fields 1219 char type = field->field_type(); 1220 if (!is_primitive_field_type(type)) { 1221 continue; 1222 } 1223 // one-to-one mapping 1224 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type; 1225 1226 // get offset and field value 1227 int offset = field->field_offset(); 1228 address addr = (address)obj + offset; 1229 jvalue value; 1230 copy_to_jvalue(&value, addr, value_type); 1231 1232 // field index 1233 reference_info.field.index = field->field_index(); 1234 1235 // invoke the callback 1236 jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD, 1237 &reference_info, 1238 wrapper->klass_tag(), 1239 wrapper->obj_tag_p(), 1240 value, 1241 value_type, 1242 user_data); 1243 if (res & JVMTI_VISIT_ABORT) { 1244 return res; 1245 } 1246 } 1247 return 0; 1248 } 1249 1250 1251 // VM operation to iterate over all objects in the heap (both reachable 1252 // and unreachable) 1253 class VM_HeapIterateOperation: public VM_Operation { 1254 private: 1255 ObjectClosure* _blk; 1256 public: 1257 VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; } 1258 1259 VMOp_Type type() const { return VMOp_HeapIterateOperation; } 1260 void doit() { 1261 // allows class files maps to be cached during iteration 1262 ClassFieldMapCacheMark cm; 1263 1264 // make sure that heap is parsable (fills TLABs with filler objects) 
1265 Universe::heap()->ensure_parsability(false); // no need to retire TLABs 1266 1267 // Verify heap before iteration - if the heap gets corrupted then 1268 // JVMTI's IterateOverHeap will crash. 1269 if (VerifyBeforeIteration) { 1270 Universe::verify(); 1271 } 1272 1273 // do the iteration 1274 Universe::heap()->object_iterate(_blk); 1275 } 1276 1277 }; 1278 1279 1280 // An ObjectClosure used to support the deprecated IterateOverHeap and 1281 // IterateOverInstancesOfClass functions 1282 class IterateOverHeapObjectClosure: public ObjectClosure { 1283 private: 1284 JvmtiTagMap* _tag_map; 1285 Klass* _klass; 1286 jvmtiHeapObjectFilter _object_filter; 1287 jvmtiHeapObjectCallback _heap_object_callback; 1288 const void* _user_data; 1289 1290 // accessors 1291 JvmtiTagMap* tag_map() const { return _tag_map; } 1292 jvmtiHeapObjectFilter object_filter() const { return _object_filter; } 1293 jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; } 1294 Klass* klass() const { return _klass; } 1295 const void* user_data() const { return _user_data; } 1296 1297 // indicates if iteration has been aborted 1298 bool _iteration_aborted; 1299 bool is_iteration_aborted() const { return _iteration_aborted; } 1300 void set_iteration_aborted(bool aborted) { _iteration_aborted = aborted; } 1301 1302 public: 1303 IterateOverHeapObjectClosure(JvmtiTagMap* tag_map, 1304 Klass* klass, 1305 jvmtiHeapObjectFilter object_filter, 1306 jvmtiHeapObjectCallback heap_object_callback, 1307 const void* user_data) : 1308 _tag_map(tag_map), 1309 _klass(klass), 1310 _object_filter(object_filter), 1311 _heap_object_callback(heap_object_callback), 1312 _user_data(user_data), 1313 _iteration_aborted(false) 1314 { 1315 } 1316 1317 void do_object(oop o); 1318 }; 1319 1320 // invoked for each object in the heap 1321 void IterateOverHeapObjectClosure::do_object(oop o) { 1322 // check if iteration has been halted 1323 if (is_iteration_aborted()) return; 1324 1325 // instanceof check 
when filtering by klass 1326 if (klass() != NULL && !o->is_a(klass())) { 1327 return; 1328 } 1329 // prepare for the calllback 1330 CallbackWrapper wrapper(tag_map(), o); 1331 1332 // if the object is tagged and we're only interested in untagged objects 1333 // then don't invoke the callback. Similiarly, if the object is untagged 1334 // and we're only interested in tagged objects we skip the callback. 1335 if (wrapper.obj_tag() != 0) { 1336 if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return; 1337 } else { 1338 if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return; 1339 } 1340 1341 // invoke the agent's callback 1342 jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(), 1343 wrapper.obj_size(), 1344 wrapper.obj_tag_p(), 1345 (void*)user_data()); 1346 if (control == JVMTI_ITERATION_ABORT) { 1347 set_iteration_aborted(true); 1348 } 1349 } 1350 1351 // An ObjectClosure used to support the IterateThroughHeap function 1352 class IterateThroughHeapObjectClosure: public ObjectClosure { 1353 private: 1354 JvmtiTagMap* _tag_map; 1355 Klass* _klass; 1356 int _heap_filter; 1357 const jvmtiHeapCallbacks* _callbacks; 1358 const void* _user_data; 1359 1360 // accessor functions 1361 JvmtiTagMap* tag_map() const { return _tag_map; } 1362 int heap_filter() const { return _heap_filter; } 1363 const jvmtiHeapCallbacks* callbacks() const { return _callbacks; } 1364 Klass* klass() const { return _klass; } 1365 const void* user_data() const { return _user_data; } 1366 1367 // indicates if the iteration has been aborted 1368 bool _iteration_aborted; 1369 bool is_iteration_aborted() const { return _iteration_aborted; } 1370 1371 // used to check the visit control flags. 
If the abort flag is set 1372 // then we set the iteration aborted flag so that the iteration completes 1373 // without processing any further objects 1374 bool check_flags_for_abort(jint flags) { 1375 bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0; 1376 if (is_abort) { 1377 _iteration_aborted = true; 1378 } 1379 return is_abort; 1380 } 1381 1382 public: 1383 IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map, 1384 Klass* klass, 1385 int heap_filter, 1386 const jvmtiHeapCallbacks* heap_callbacks, 1387 const void* user_data) : 1388 _tag_map(tag_map), 1389 _klass(klass), 1390 _heap_filter(heap_filter), 1391 _callbacks(heap_callbacks), 1392 _user_data(user_data), 1393 _iteration_aborted(false) 1394 { 1395 } 1396 1397 void do_object(oop o); 1398 }; 1399 1400 // invoked for each object in the heap 1401 void IterateThroughHeapObjectClosure::do_object(oop obj) { 1402 // check if iteration has been halted 1403 if (is_iteration_aborted()) return; 1404 1405 // apply class filter 1406 if (is_filtered_by_klass_filter(obj, klass())) return; 1407 1408 // prepare for callback 1409 CallbackWrapper wrapper(tag_map(), obj); 1410 1411 // check if filtered by the heap filter 1412 if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) { 1413 return; 1414 } 1415 1416 // for arrays we need the length, otherwise -1 1417 bool is_array = obj->is_array(); 1418 int len = is_array ? 
arrayOop(obj)->length() : -1; 1419 1420 // invoke the object callback (if callback is provided) 1421 if (callbacks()->heap_iteration_callback != NULL) { 1422 jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback; 1423 jint res = (*cb)(wrapper.klass_tag(), 1424 wrapper.obj_size(), 1425 wrapper.obj_tag_p(), 1426 (jint)len, 1427 (void*)user_data()); 1428 if (check_flags_for_abort(res)) return; 1429 } 1430 1431 // for objects and classes we report primitive fields if callback provided 1432 if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) { 1433 jint res; 1434 jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback; 1435 if (obj->klass() == SystemDictionary::Class_klass()) { 1436 res = invoke_primitive_field_callback_for_static_fields(&wrapper, 1437 obj, 1438 cb, 1439 (void*)user_data()); 1440 } else { 1441 res = invoke_primitive_field_callback_for_instance_fields(&wrapper, 1442 obj, 1443 cb, 1444 (void*)user_data()); 1445 } 1446 if (check_flags_for_abort(res)) return; 1447 } 1448 1449 // string callback 1450 if (!is_array && 1451 callbacks()->string_primitive_value_callback != NULL && 1452 obj->klass() == SystemDictionary::String_klass()) { 1453 jint res = invoke_string_value_callback( 1454 callbacks()->string_primitive_value_callback, 1455 &wrapper, 1456 obj, 1457 (void*)user_data() ); 1458 if (check_flags_for_abort(res)) return; 1459 } 1460 1461 // array callback 1462 if (is_array && 1463 callbacks()->array_primitive_value_callback != NULL && 1464 obj->is_typeArray()) { 1465 jint res = invoke_array_primitive_value_callback( 1466 callbacks()->array_primitive_value_callback, 1467 &wrapper, 1468 obj, 1469 (void*)user_data() ); 1470 if (check_flags_for_abort(res)) return; 1471 } 1472 }; 1473 1474 1475 // Deprecated function to iterate over all objects in the heap 1476 void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter, 1477 Klass* klass, 1478 jvmtiHeapObjectCallback heap_object_callback, 1479 
const void* user_data) 1480 { 1481 MutexLocker ml(Heap_lock); 1482 IterateOverHeapObjectClosure blk(this, 1483 klass, 1484 object_filter, 1485 heap_object_callback, 1486 user_data); 1487 VM_HeapIterateOperation op(&blk); 1488 VMThread::execute(&op); 1489 } 1490 1491 1492 // Iterates over all objects in the heap 1493 void JvmtiTagMap::iterate_through_heap(jint heap_filter, 1494 Klass* klass, 1495 const jvmtiHeapCallbacks* callbacks, 1496 const void* user_data) 1497 { 1498 MutexLocker ml(Heap_lock); 1499 IterateThroughHeapObjectClosure blk(this, 1500 klass, 1501 heap_filter, 1502 callbacks, 1503 user_data); 1504 VM_HeapIterateOperation op(&blk); 1505 VMThread::execute(&op); 1506 } 1507 1508 // support class for get_objects_with_tags 1509 1510 class TagObjectCollector : public JvmtiTagHashmapEntryClosure { 1511 private: 1512 JvmtiEnv* _env; 1513 jlong* _tags; 1514 jint _tag_count; 1515 1516 GrowableArray<jobject>* _object_results; // collected objects (JNI weak refs) 1517 GrowableArray<uint64_t>* _tag_results; // collected tags 1518 1519 public: 1520 TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) { 1521 _env = env; 1522 _tags = (jlong*)tags; 1523 _tag_count = tag_count; 1524 _object_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<jobject>(1,true); 1525 _tag_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<uint64_t>(1,true); 1526 } 1527 1528 ~TagObjectCollector() { 1529 delete _object_results; 1530 delete _tag_results; 1531 } 1532 1533 // for each tagged object check if the tag value matches 1534 // - if it matches then we create a JNI local reference to the object 1535 // and record the reference and tag value. 1536 // 1537 void do_entry(JvmtiTagHashmapEntry* entry) { 1538 for (int i=0; i<_tag_count; i++) { 1539 if (_tags[i] == entry->tag()) { 1540 // The reference in this tag map could be the only (implicitly weak) 1541 // reference to that object. 
If we hand it out, we need to keep it live wrt 1542 // SATB marking similar to other j.l.ref.Reference referents. This is 1543 // achieved by using a phantom load in the object() accessor. 1544 oop o = entry->object(); 1545 assert(o != NULL && Universe::heap()->is_in(o), "sanity check"); 1546 jobject ref = JNIHandles::make_local(JavaThread::current(), o); 1547 _object_results->append(ref); 1548 _tag_results->append((uint64_t)entry->tag()); 1549 } 1550 } 1551 } 1552 1553 // return the results from the collection 1554 // 1555 jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) { 1556 jvmtiError error; 1557 int count = _object_results->length(); 1558 assert(count >= 0, "sanity check"); 1559 1560 // if object_result_ptr is not NULL then allocate the result and copy 1561 // in the object references. 1562 if (object_result_ptr != NULL) { 1563 error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr); 1564 if (error != JVMTI_ERROR_NONE) { 1565 return error; 1566 } 1567 for (int i=0; i<count; i++) { 1568 (*object_result_ptr)[i] = _object_results->at(i); 1569 } 1570 } 1571 1572 // if tag_result_ptr is not NULL then allocate the result and copy 1573 // in the tag values. 
1574 if (tag_result_ptr != NULL) { 1575 error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr); 1576 if (error != JVMTI_ERROR_NONE) { 1577 if (object_result_ptr != NULL) { 1578 _env->Deallocate((unsigned char*)object_result_ptr); 1579 } 1580 return error; 1581 } 1582 for (int i=0; i<count; i++) { 1583 (*tag_result_ptr)[i] = (jlong)_tag_results->at(i); 1584 } 1585 } 1586 1587 *count_ptr = count; 1588 return JVMTI_ERROR_NONE; 1589 } 1590 }; 1591 1592 // return the list of objects with the specified tags 1593 jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags, 1594 jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) { 1595 1596 TagObjectCollector collector(env(), tags, count); 1597 { 1598 // iterate over all tagged objects 1599 MutexLocker ml(lock()); 1600 entry_iterate(&collector); 1601 } 1602 return collector.result(count_ptr, object_result_ptr, tag_result_ptr); 1603 } 1604 1605 1606 // ObjectMarker is used to support the marking objects when walking the 1607 // heap. 1608 // 1609 // This implementation uses the existing mark bits in an object for 1610 // marking. Objects that are marked must later have their headers restored. 1611 // As most objects are unlocked and don't have their identity hash computed 1612 // we don't have to save their headers. Instead we save the headers that 1613 // are "interesting". Later when the headers are restored this implementation 1614 // restores all headers to their initial value and then restores the few 1615 // objects that had interesting headers. 1616 // 1617 // Future work: This implementation currently uses growable arrays to save 1618 // the oop and header of interesting objects. As an optimization we could 1619 // use the same technique as the GC and make use of the unused area 1620 // between top() and end(). 
//

// An ObjectClosure used to restore the mark bits of an object
// (init_mark() resets a marked header back to its prototype value)
class RestoreMarksClosure : public ObjectClosure {
 public:
  void do_object(oop o) {
    if (o != NULL) {
      markWord mark = o->mark();
      if (mark.is_marked()) {
        o->init_mark();
      }
    }
  }
};

// ObjectMarker provides the mark and visited functions.
// Marking steals the object's mark word, so init() must be paired with
// done() (see ObjectMarkerController) to restore all headers afterwards.
class ObjectMarker : AllStatic {
 private:
  // saved headers: parallel stacks of objects whose original mark words
  // were "interesting" (locked / identity-hashed) and must be restored
  static GrowableArray<oop>* _saved_oop_stack;
  static GrowableArray<markWord>* _saved_mark_stack;
  static bool _needs_reset;                  // do we need to reset mark bits?

 public:
  static void init();                        // initialize
  static void done();                        // clean-up

  static inline void mark(oop o);            // mark an object
  static inline bool visited(oop o);         // check if object has been visited

  static inline bool needs_reset()           { return _needs_reset; }
  static inline void set_needs_reset(bool v) { _needs_reset = v; }
};

GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
GrowableArray<markWord>* ObjectMarker::_saved_mark_stack = NULL;
bool ObjectMarker::_needs_reset = true;  // need to reset mark bits by default

// initialize ObjectMarker - prepares for object marking
void ObjectMarker::init() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");

  // prepare heap for iteration
  Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

  // create stacks for interesting headers
  _saved_mark_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<markWord>(4000, true);
  _saved_oop_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(4000, true);

  if (UseBiasedLocking) {
    BiasedLocking::preserve_marks();
  }
}

// Object marking is done so restore object headers
void ObjectMarker::done() {
  // iterate over all objects and restore the mark bits to
  // their initial value
  RestoreMarksClosure blk;
  if (needs_reset()) {
    Universe::heap()->object_iterate(&blk);
  } else {
    // We don't need to reset mark bits on this call, but reset the
    // flag to the default for the next call.
    set_needs_reset(true);
  }

  // now restore the interesting headers
  // (must happen after the blanket reset above so the saved values win)
  for (int i = 0; i < _saved_oop_stack->length(); i++) {
    oop o = _saved_oop_stack->at(i);
    markWord mark = _saved_mark_stack->at(i);
    o->set_mark(mark);
  }

  if (UseBiasedLocking) {
    BiasedLocking::restore_marks();
  }

  // free the stacks
  delete _saved_oop_stack;
  delete _saved_mark_stack;
}

// mark an object
inline void ObjectMarker::mark(oop o) {
  assert(Universe::heap()->is_in(o), "sanity check");
  assert(!o->mark().is_marked(), "should only mark an object once");

  // object's mark word
  markWord mark = o->mark();

  // save headers that cannot be trivially reconstructed (e.g. locked or
  // identity-hashed objects) so done() can restore them exactly
  if (o->mark_must_be_preserved(mark)) {
    _saved_mark_stack->push(mark);
    _saved_oop_stack->push(o);
  }

  // mark the object
  o->set_mark(markWord::prototype().set_marked());
}

// return true if object is marked
inline bool ObjectMarker::visited(oop o) {
  return o->mark().is_marked();
}

// Stack allocated class to help ensure that ObjectMarker is used
// correctly. Constructor initializes ObjectMarker, destructor calls
// ObjectMarker's done() function to restore object headers.
1729 class ObjectMarkerController : public StackObj { 1730 public: 1731 ObjectMarkerController() { 1732 ObjectMarker::init(); 1733 } 1734 ~ObjectMarkerController() { 1735 ObjectMarker::done(); 1736 } 1737 }; 1738 1739 1740 // helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind 1741 // (not performance critical as only used for roots) 1742 static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) { 1743 switch (kind) { 1744 case JVMTI_HEAP_REFERENCE_JNI_GLOBAL: return JVMTI_HEAP_ROOT_JNI_GLOBAL; 1745 case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS; 1746 case JVMTI_HEAP_REFERENCE_MONITOR: return JVMTI_HEAP_ROOT_MONITOR; 1747 case JVMTI_HEAP_REFERENCE_STACK_LOCAL: return JVMTI_HEAP_ROOT_STACK_LOCAL; 1748 case JVMTI_HEAP_REFERENCE_JNI_LOCAL: return JVMTI_HEAP_ROOT_JNI_LOCAL; 1749 case JVMTI_HEAP_REFERENCE_THREAD: return JVMTI_HEAP_ROOT_THREAD; 1750 case JVMTI_HEAP_REFERENCE_OTHER: return JVMTI_HEAP_ROOT_OTHER; 1751 default: ShouldNotReachHere(); return JVMTI_HEAP_ROOT_OTHER; 1752 } 1753 } 1754 1755 // Base class for all heap walk contexts. The base class maintains a flag 1756 // to indicate if the context is valid or not. 1757 class HeapWalkContext { 1758 private: 1759 bool _valid; 1760 public: 1761 HeapWalkContext(bool valid) { _valid = valid; } 1762 void invalidate() { _valid = false; } 1763 bool is_valid() const { return _valid; } 1764 }; 1765 1766 // A basic heap walk context for the deprecated heap walking functions. 1767 // The context for a basic heap walk are the callbacks and fields used by 1768 // the referrer caching scheme. 
1769 class BasicHeapWalkContext: public HeapWalkContext { 1770 private: 1771 jvmtiHeapRootCallback _heap_root_callback; 1772 jvmtiStackReferenceCallback _stack_ref_callback; 1773 jvmtiObjectReferenceCallback _object_ref_callback; 1774 1775 // used for caching 1776 oop _last_referrer; 1777 jlong _last_referrer_tag; 1778 1779 public: 1780 BasicHeapWalkContext() : HeapWalkContext(false) { } 1781 1782 BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback, 1783 jvmtiStackReferenceCallback stack_ref_callback, 1784 jvmtiObjectReferenceCallback object_ref_callback) : 1785 HeapWalkContext(true), 1786 _heap_root_callback(heap_root_callback), 1787 _stack_ref_callback(stack_ref_callback), 1788 _object_ref_callback(object_ref_callback), 1789 _last_referrer(NULL), 1790 _last_referrer_tag(0) { 1791 } 1792 1793 // accessors 1794 jvmtiHeapRootCallback heap_root_callback() const { return _heap_root_callback; } 1795 jvmtiStackReferenceCallback stack_ref_callback() const { return _stack_ref_callback; } 1796 jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback; } 1797 1798 oop last_referrer() const { return _last_referrer; } 1799 void set_last_referrer(oop referrer) { _last_referrer = referrer; } 1800 jlong last_referrer_tag() const { return _last_referrer_tag; } 1801 void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; } 1802 }; 1803 1804 // The advanced heap walk context for the FollowReferences functions. 1805 // The context is the callbacks, and the fields used for filtering. 
1806 class AdvancedHeapWalkContext: public HeapWalkContext { 1807 private: 1808 jint _heap_filter; 1809 Klass* _klass_filter; 1810 const jvmtiHeapCallbacks* _heap_callbacks; 1811 1812 public: 1813 AdvancedHeapWalkContext() : HeapWalkContext(false) { } 1814 1815 AdvancedHeapWalkContext(jint heap_filter, 1816 Klass* klass_filter, 1817 const jvmtiHeapCallbacks* heap_callbacks) : 1818 HeapWalkContext(true), 1819 _heap_filter(heap_filter), 1820 _klass_filter(klass_filter), 1821 _heap_callbacks(heap_callbacks) { 1822 } 1823 1824 // accessors 1825 jint heap_filter() const { return _heap_filter; } 1826 Klass* klass_filter() const { return _klass_filter; } 1827 1828 const jvmtiHeapReferenceCallback heap_reference_callback() const { 1829 return _heap_callbacks->heap_reference_callback; 1830 }; 1831 const jvmtiPrimitiveFieldCallback primitive_field_callback() const { 1832 return _heap_callbacks->primitive_field_callback; 1833 } 1834 const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const { 1835 return _heap_callbacks->array_primitive_value_callback; 1836 } 1837 const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const { 1838 return _heap_callbacks->string_primitive_value_callback; 1839 } 1840 }; 1841 1842 // The CallbackInvoker is a class with static functions that the heap walk can call 1843 // into to invoke callbacks. It works in one of two modes. The "basic" mode is 1844 // used for the deprecated IterateOverReachableObjects functions. The "advanced" 1845 // mode is for the newer FollowReferences function which supports a lot of 1846 // additional callbacks. 
class CallbackInvoker : AllStatic {
 private:
  // heap walk styles
  enum { basic, advanced };
  static int _heap_walk_type;
  static bool is_basic_heap_walk()           { return _heap_walk_type == basic; }
  static bool is_advanced_heap_walk()        { return _heap_walk_type == advanced; }

  // context for basic style heap walk
  static BasicHeapWalkContext _basic_context;
  static BasicHeapWalkContext* basic_context() {
    assert(_basic_context.is_valid(), "invalid");
    return &_basic_context;
  }

  // context for advanced style heap walk
  static AdvancedHeapWalkContext _advanced_context;
  static AdvancedHeapWalkContext* advanced_context() {
    assert(_advanced_context.is_valid(), "invalid");
    return &_advanced_context;
  }

  // context needed for all heap walks
  static JvmtiTagMap* _tag_map;
  static const void* _user_data;
  static GrowableArray<oop>* _visit_stack;

  // accessors
  static JvmtiTagMap* tag_map()              { return _tag_map; }
  static const void* user_data()             { return _user_data; }
  static GrowableArray<oop>* visit_stack()   { return _visit_stack; }

  // if the object hasn't been visited then push it onto the visit stack
  // so that it will be visited later
  static inline bool check_for_visit(oop obj) {
    if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
    return true;
  }

  // invoke basic style callbacks
  static inline bool invoke_basic_heap_root_callback
    (jvmtiHeapRootKind root_kind, oop obj);
  static inline bool invoke_basic_stack_ref_callback
    (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
     int slot, oop obj);
  static inline bool invoke_basic_object_reference_callback
    (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // invoke advanced style callbacks
  static inline bool invoke_advanced_heap_root_callback
    (jvmtiHeapReferenceKind ref_kind, oop obj);
  static inline bool invoke_advanced_stack_ref_callback
    (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
     jmethodID method, jlocation bci, jint slot, oop obj);
  static inline bool invoke_advanced_object_reference_callback
    (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // used to report the value of primitive fields
  static inline bool report_primitive_field
    (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);

 public:
  // initialize for basic mode
  static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                             GrowableArray<oop>* visit_stack,
                                             const void* user_data,
                                             BasicHeapWalkContext context);

  // initialize for advanced mode
  static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                GrowableArray<oop>* visit_stack,
                                                const void* user_data,
                                                AdvancedHeapWalkContext context);

  // functions to report roots
  static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
  static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID m, oop o);
  static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID method, jlocation bci, jint slot, oop o);

  // functions to report references
  static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
  static inline bool report_class_reference(oop referrer, oop referree);
  static inline bool report_class_loader_reference(oop referrer, oop referree);
  static inline bool report_signers_reference(oop referrer, oop referree);
  static inline bool report_protection_domain_reference(oop referrer, oop referree);
  static inline bool report_superclass_reference(oop referrer, oop referree);
  static inline bool report_interface_reference(oop referrer, oop referree);
  static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
  static inline bool report_primitive_array_values(oop array);
  static inline bool report_string_value(oop str);
  static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
  static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
};

// statics
int CallbackInvoker::_heap_walk_type;
BasicHeapWalkContext CallbackInvoker::_basic_context;
AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
JvmtiTagMap* CallbackInvoker::_tag_map;
const void* CallbackInvoker::_user_data;
GrowableArray<oop>* CallbackInvoker::_visit_stack;

// initialize for basic heap walk (IterateOverReachableObjects et al)
void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                                     GrowableArray<oop>* visit_stack,
                                                     const void* user_data,
                                                     BasicHeapWalkContext context) {
  _tag_map = tag_map;
  _visit_stack = visit_stack;
  _user_data = user_data;
  _basic_context = context;
  _advanced_context.invalidate();  // will trigger assertion if used
  _heap_walk_type = basic;
}

// initialize for advanced heap walk (FollowReferences)
void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                        GrowableArray<oop>* visit_stack,
                                                        const void* user_data,
                                                        AdvancedHeapWalkContext context) {
  _tag_map = tag_map;
  _visit_stack = visit_stack;
  _user_data = user_data;
  _advanced_context = context;
  _basic_context.invalidate();  // will trigger assertion if used
  _heap_walk_type = advanced;
}


// invoke basic style heap root callback
inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
  // check if heap roots should be reported; with no callback the object is
  // just considered for visiting
  jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}

// invoke basic style stack ref callback
inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
                                                             jlong thread_tag,
                                                             jint depth,
                                                             jmethodID method,
                                                             int slot,
                                                             oop obj) {
  // check if stack refs should be reported; with no callback the object is
  // just considered for visiting
  jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        thread_tag,
                                        depth,
                                        method,
                                        slot,
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}

// invoke basic style object reference callback
inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
                                                                    oop referrer,
                                                                    oop referree,
                                                                    jint index) {

  BasicHeapWalkContext* context = basic_context();

  // callback requires the referrer's tag. If it's the same referrer
  // as the last call then we use the cached value.
  jlong referrer_tag;
  if (referrer == context->last_referrer()) {
    referrer_tag = context->last_referrer_tag();
  } else {
    referrer_tag = tag_for(tag_map(), referrer);
  }

  // do the callback
  CallbackWrapper wrapper(tag_map(), referree);
  jvmtiObjectReferenceCallback cb = context->object_ref_callback();
  jvmtiIterationControl control = (*cb)(ref_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        referrer_tag,
                                        index,
                                        (void*)user_data());

  // record referrer and referrer tag. For self-references record the
  // tag value from the callback as this might differ from referrer_tag.
  context->set_last_referrer(referrer);
  if (referrer == referree) {
    context->set_last_referrer_tag(*wrapper.obj_tag_p());
  } else {
    context->set_last_referrer_tag(referrer_tag);
  }

  if (control == JVMTI_ITERATION_CONTINUE) {
    return check_for_visit(referree);
  } else {
    return control != JVMTI_ITERATION_ABORT;
  }
}

// invoke advanced style heap root callback
inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
                                                                oop obj) {
  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback
  jint res = (*cb)(ref_kind,
                   NULL,              // referrer info
                   wrapper.klass_tag(),
                   0,                 // referrer_class_tag is 0 for heap root
                   wrapper.obj_size(),
                   wrapper.obj_tag_p(),
                   NULL,              // referrer_tag_p
                   len,
                   (void*)user_data());
  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}

// report a reference from a thread stack to an object
inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
                                                                jlong thread_tag,
                                                                jlong tid,
                                                                int depth,
                                                                jmethodID method,
                                                                jlocation bci,
                                                                jint slot,
                                                                oop obj) {
  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // setup the referrer info
  jvmtiHeapReferenceInfo reference_info;
  reference_info.stack_local.thread_tag = thread_tag;
  reference_info.stack_local.thread_id = tid;
  reference_info.stack_local.depth = depth;
  reference_info.stack_local.method = method;
  reference_info.stack_local.location = bci;
  reference_info.stack_local.slot = slot;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // call into the agent
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  0,                 // referrer_class_tag is 0 for heap root (stack)
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  NULL,              // referrer_tag is 0 for root
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}

// This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
// only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
#define REF_INFO_MASK ((1 << JVMTI_HEAP_REFERENCE_FIELD)         \
                     | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)  \
                     | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
                     | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
                     | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)   \
                     | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))

// invoke the object reference callback to report a reference
inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
                                                                       oop referrer,
                                                                       oop obj,
                                                                       jint index)
{
  // field index is the only valid field in reference_info
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return
check_for_visit(obj);
  }

  // field index is the only valid field in reference_info
  reference_info.field.index = index;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback
  int res = (*cb)(ref_kind,
                  (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
                  wrapper.klass_tag(),
                  wrapper.referrer_klass_tag(),
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  wrapper.referrer_tag_p(),
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}

// report a "simple root" (i.e. not a stack local or JNI local root)
inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
  assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
         kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");

  if (is_basic_heap_walk()) {
    // map to old style root kind
    jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
    return invoke_basic_heap_root_callback(root_kind, obj);
  } else {
    assert(is_advanced_heap_walk(), "wrong heap walk type");
    return invoke_advanced_heap_root_callback(kind, obj);
  }
}


// invoke the array primitive value callback (advanced mode only)
inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
  assert(obj->is_typeArray(), "not a primitive array");

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->array_primitive_value_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // invoke the callback
  int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
                                                  &wrapper,
                                                  obj,
                                                  (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}

// invoke the string value callback (advanced mode only)
inline bool CallbackInvoker::report_string_value(oop str) {
  assert(str->klass() == SystemDictionary::String_klass(), "not a string");

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->string_primitive_value_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(str, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), str);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // invoke the callback
  int res = invoke_string_value_callback(context->string_primitive_value_callback(),
                                         &wrapper,
                                         str,
                                         (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}

// invoke the primitive field callback
inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
                                                    oop obj,
                                                    jint index,
                                                    address addr,
                                                    char type)
{
  // for primitive fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->primitive_field_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // the field index in the referrer
  reference_info.field.index = index;

  // map the type
  jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

  // setup the jvalue
  jvalue value;
  copy_to_jvalue(&value, addr, value_type);

  jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  wrapper.obj_tag_p(),
                  value,
                  value_type,
                  (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}


// report a primitive instance field
inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
                                                             jint index,
                                                             address value,
                                                             char type) {
  return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
                                obj,
                                index,
                                value,
                                type);
}

// report a primitive static field
inline bool CallbackInvoker::report_primitive_static_field(oop obj,
                                                           jint index,
                                                           address value,
                                                           char type) {
  return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                obj,
                                index,
                                value,
                                type);
}

// report a JNI local (root object) to the profiler
inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
  if (is_basic_heap_walk()) {
    return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
                                           thread_tag,
                                           depth,
                                           m,
                                           -1,
                                           obj);
  } else {
    return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
                                              thread_tag, tid,
                                              depth,
                                              m,
                                              (jlocation)-1,
                                              -1,
                                              obj);
  }
}


// report a local (stack reference, root object)
inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
                                                   jlong tid,
                                                   jint depth,
                                                   jmethodID method,
                                                   jlocation bci,
                                                   jint slot,
                                                   oop obj) {
  if (is_basic_heap_walk()) {
    return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
                                           thread_tag,
                                           depth,
                                           method,
                                           slot,
                                           obj);
  } else {
    return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
                                              thread_tag,
                                              tid,
                                              depth,
                                              method,
                                              bci,
                                              slot,
                                              obj);
  }
}

// report an object referencing a class.
inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
  }
}

// report a class referencing its class loader.
inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
  }
}

// report a class referencing its signers.
inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
  }
}

// report a class referencing its protection domain.
inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
  }
}

// report a class referencing its superclass.
inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    // Send this to be consistent with past implementation
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
  }
}

// report a class referencing one of its interfaces.
inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
  }
}

// report a class referencing one of its static fields.
inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
  }
}

// report an array referencing an element object
inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
  }
}

// report an object referencing an instance field object
inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
  }
}

// report a class referencing one of its constant pool entries
inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
  }
}

// A supporting closure used to process simple roots
class SimpleRootsClosure : public OopClosure {
 private:
  jvmtiHeapReferenceKind _kind;   // root kind reported for each oop
  bool _continue;                 // false once a callback aborts the iteration

  jvmtiHeapReferenceKind root_kind() { return _kind; }

 public:
  void set_kind(jvmtiHeapReferenceKind kind) {
    _kind = kind;
    _continue = true;
  }

  inline bool stopped() {
    return !_continue;
  }

  void do_oop(oop* obj_p) {
    // iteration has terminated
    if (stopped()) {
      return;
    }

    oop o = NativeAccess<AS_NO_KEEPALIVE>::oop_load(obj_p);
    // ignore null
    if (o == NULL) {
      return;
    }

    assert(Universe::heap()->is_in(o), "should be impossible");

    jvmtiHeapReferenceKind kind = root_kind();
    if (kind == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
      // SystemDictionary::oops_do reports the application
      // class loader as a root. We want this root to be reported as
      // a root kind of "OTHER" rather than "SYSTEM_CLASS".
2579 if (!o->is_instance() || !InstanceKlass::cast(o->klass())->is_mirror_instance_klass()) { 2580 kind = JVMTI_HEAP_REFERENCE_OTHER; 2581 } 2582 } 2583 2584 // invoke the callback 2585 _continue = CallbackInvoker::report_simple_root(kind, o); 2586 2587 } 2588 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); } 2589 }; 2590 2591 // A supporting closure used to process JNI locals 2592 class JNILocalRootsClosure : public OopClosure { 2593 private: 2594 jlong _thread_tag; 2595 jlong _tid; 2596 jint _depth; 2597 jmethodID _method; 2598 bool _continue; 2599 public: 2600 void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) { 2601 _thread_tag = thread_tag; 2602 _tid = tid; 2603 _depth = depth; 2604 _method = method; 2605 _continue = true; 2606 } 2607 2608 inline bool stopped() { 2609 return !_continue; 2610 } 2611 2612 void do_oop(oop* obj_p) { 2613 // iteration has terminated 2614 if (stopped()) { 2615 return; 2616 } 2617 2618 oop o = *obj_p; 2619 // ignore null 2620 if (o == NULL) { 2621 return; 2622 } 2623 2624 // invoke the callback 2625 _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o); 2626 } 2627 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); } 2628 }; 2629 2630 2631 // A VM operation to iterate over objects that are reachable from 2632 // a set of roots or an initial object. 
2633 // 2634 // For VM_HeapWalkOperation the set of roots used is :- 2635 // 2636 // - All JNI global references 2637 // - All inflated monitors 2638 // - All classes loaded by the boot class loader (or all classes 2639 // in the event that class unloading is disabled) 2640 // - All java threads 2641 // - For each java thread then all locals and JNI local references 2642 // on the thread's execution stack 2643 // - All visible/explainable objects from Universes::oops_do 2644 // 2645 class VM_HeapWalkOperation: public VM_Operation { 2646 private: 2647 enum { 2648 initial_visit_stack_size = 4000 2649 }; 2650 2651 bool _is_advanced_heap_walk; // indicates FollowReferences 2652 JvmtiTagMap* _tag_map; 2653 Handle _initial_object; 2654 GrowableArray<oop>* _visit_stack; // the visit stack 2655 2656 bool _collecting_heap_roots; // are we collecting roots 2657 bool _following_object_refs; // are we following object references 2658 2659 bool _reporting_primitive_fields; // optional reporting 2660 bool _reporting_primitive_array_values; 2661 bool _reporting_string_values; 2662 2663 GrowableArray<oop>* create_visit_stack() { 2664 return new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(initial_visit_stack_size, true); 2665 } 2666 2667 // accessors 2668 bool is_advanced_heap_walk() const { return _is_advanced_heap_walk; } 2669 JvmtiTagMap* tag_map() const { return _tag_map; } 2670 Handle initial_object() const { return _initial_object; } 2671 2672 bool is_following_references() const { return _following_object_refs; } 2673 2674 bool is_reporting_primitive_fields() const { return _reporting_primitive_fields; } 2675 bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; } 2676 bool is_reporting_string_values() const { return _reporting_string_values; } 2677 2678 GrowableArray<oop>* visit_stack() const { return _visit_stack; } 2679 2680 // iterate over the various object types 2681 inline bool iterate_over_array(oop o); 2682 inline bool 
iterate_over_type_array(oop o); 2683 inline bool iterate_over_class(oop o); 2684 inline bool iterate_over_object(oop o); 2685 2686 // root collection 2687 inline bool collect_simple_roots(); 2688 inline bool collect_stack_roots(); 2689 inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk); 2690 2691 // visit an object 2692 inline bool visit(oop o); 2693 2694 public: 2695 VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2696 Handle initial_object, 2697 BasicHeapWalkContext callbacks, 2698 const void* user_data); 2699 2700 VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2701 Handle initial_object, 2702 AdvancedHeapWalkContext callbacks, 2703 const void* user_data); 2704 2705 ~VM_HeapWalkOperation(); 2706 2707 VMOp_Type type() const { return VMOp_HeapWalkOperation; } 2708 void doit(); 2709 }; 2710 2711 2712 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2713 Handle initial_object, 2714 BasicHeapWalkContext callbacks, 2715 const void* user_data) { 2716 _is_advanced_heap_walk = false; 2717 _tag_map = tag_map; 2718 _initial_object = initial_object; 2719 _following_object_refs = (callbacks.object_ref_callback() != NULL); 2720 _reporting_primitive_fields = false; 2721 _reporting_primitive_array_values = false; 2722 _reporting_string_values = false; 2723 _visit_stack = create_visit_stack(); 2724 2725 2726 CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks); 2727 } 2728 2729 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2730 Handle initial_object, 2731 AdvancedHeapWalkContext callbacks, 2732 const void* user_data) { 2733 _is_advanced_heap_walk = true; 2734 _tag_map = tag_map; 2735 _initial_object = initial_object; 2736 _following_object_refs = true; 2737 _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);; 2738 _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);; 2739 _reporting_string_values = 
(callbacks.string_primitive_value_callback() != NULL);; 2740 _visit_stack = create_visit_stack(); 2741 2742 CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks); 2743 } 2744 2745 VM_HeapWalkOperation::~VM_HeapWalkOperation() { 2746 if (_following_object_refs) { 2747 assert(_visit_stack != NULL, "checking"); 2748 delete _visit_stack; 2749 _visit_stack = NULL; 2750 } 2751 } 2752 2753 // an array references its class and has a reference to 2754 // each element in the array 2755 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) { 2756 objArrayOop array = objArrayOop(o); 2757 2758 // array reference to its class 2759 oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror(); 2760 if (!CallbackInvoker::report_class_reference(o, mirror)) { 2761 return false; 2762 } 2763 2764 // iterate over the array and report each reference to a 2765 // non-null element 2766 for (int index=0; index<array->length(); index++) { 2767 oop elem = array->obj_at(index); 2768 if (elem == NULL) { 2769 continue; 2770 } 2771 2772 // report the array reference o[index] = elem 2773 if (!CallbackInvoker::report_array_element_reference(o, elem, index)) { 2774 return false; 2775 } 2776 } 2777 return true; 2778 } 2779 2780 // a type array references its class 2781 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) { 2782 Klass* k = o->klass(); 2783 oop mirror = k->java_mirror(); 2784 if (!CallbackInvoker::report_class_reference(o, mirror)) { 2785 return false; 2786 } 2787 2788 // report the array contents if required 2789 if (is_reporting_primitive_array_values()) { 2790 if (!CallbackInvoker::report_primitive_array_values(o)) { 2791 return false; 2792 } 2793 } 2794 return true; 2795 } 2796 2797 #ifdef ASSERT 2798 // verify that a static oop field is in range 2799 static inline bool verify_static_oop(InstanceKlass* ik, 2800 oop mirror, int offset) { 2801 address obj_p = (address)mirror + offset; 2802 address start = 
(address)InstanceMirrorKlass::start_of_static_fields(mirror); 2803 address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize); 2804 assert(end >= start, "sanity check"); 2805 2806 if (obj_p >= start && obj_p < end) { 2807 return true; 2808 } else { 2809 return false; 2810 } 2811 } 2812 #endif // #ifdef ASSERT 2813 2814 // a class references its super class, interfaces, class loader, ... 2815 // and finally its static fields 2816 inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) { 2817 int i; 2818 Klass* klass = java_lang_Class::as_Klass(java_class); 2819 2820 if (klass->is_instance_klass()) { 2821 InstanceKlass* ik = InstanceKlass::cast(klass); 2822 2823 // Ignore the class if it hasn't been initialized yet 2824 if (!ik->is_linked()) { 2825 return true; 2826 } 2827 2828 // get the java mirror 2829 oop mirror = klass->java_mirror(); 2830 2831 // super (only if something more interesting than java.lang.Object) 2832 InstanceKlass* java_super = ik->java_super(); 2833 if (java_super != NULL && java_super != SystemDictionary::Object_klass()) { 2834 oop super = java_super->java_mirror(); 2835 if (!CallbackInvoker::report_superclass_reference(mirror, super)) { 2836 return false; 2837 } 2838 } 2839 2840 // class loader 2841 oop cl = ik->class_loader(); 2842 if (cl != NULL) { 2843 if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) { 2844 return false; 2845 } 2846 } 2847 2848 // protection domain 2849 oop pd = ik->protection_domain(); 2850 if (pd != NULL) { 2851 if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) { 2852 return false; 2853 } 2854 } 2855 2856 // signers 2857 oop signers = ik->signers(); 2858 if (signers != NULL) { 2859 if (!CallbackInvoker::report_signers_reference(mirror, signers)) { 2860 return false; 2861 } 2862 } 2863 2864 // references from the constant pool 2865 { 2866 ConstantPool* pool = ik->constants(); 2867 for (int i = 1; i < pool->length(); i++) { 2868 constantTag tag = 
pool->tag_at(i).value();
        if (tag.is_string() || tag.is_klass() || tag.is_unresolved_klass()) {
          oop entry;
          if (tag.is_string()) {
            entry = pool->resolved_string_at(i);
            // If the entry is non-null it is resolved.
            if (entry == NULL) {
              continue;
            }
          } else if (tag.is_klass()) {
            entry = pool->resolved_klass_at(i)->java_mirror();
          } else {
            // Code generated by JIT and AOT compilers might not resolve constant
            // pool entries.  Treat them as resolved if they are loaded.
            assert(tag.is_unresolved_klass(), "must be");
            constantPoolHandle cp(Thread::current(), pool);
            Klass* klass = ConstantPool::klass_at_if_loaded(cp, i);
            if (klass == NULL) {
              continue;
            }
            entry = klass->java_mirror();
          }
          if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
            return false;
          }
        }
      }
    }

    // interfaces
    // (These will already have been reported as references from the constant pool
    //  but are specified by IterateOverReachableObjects and must be reported).
    Array<InstanceKlass*>* interfaces = ik->local_interfaces();
    for (i = 0; i < interfaces->length(); i++) {
      oop interf = interfaces->at(i)->java_mirror();
      if (interf == NULL) {
        continue;
      }
      if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
        return false;
      }
    }

    // iterate over the static fields

    ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
    for (i = 0; i < field_map->field_count(); i++) {
      ClassFieldDescriptor* field = field_map->field_at(i);
      char type = field->field_type();
      if (!is_primitive_field_type(type)) {
        oop fld_o = mirror->obj_field(field->field_offset());
        assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
        if (fld_o != NULL) {
          int slot = field->field_index();
          if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
            // free the map before the early return
            delete field_map;
            return false;
          }
        }
      } else {
        if (is_reporting_primitive_fields()) {
          address addr = (address)mirror + field->field_offset();
          int slot = field->field_index();
          if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
            // free the map before the early return
            delete field_map;
            return false;
          }
        }
      }
    }
    delete field_map;

    return true;
  }

  return true;
}

// an object references a class and its instance fields
// (static fields are ignored here as we report these as
// references from the class).
2949 inline bool VM_HeapWalkOperation::iterate_over_object(oop o) { 2950 // reference to the class 2951 if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) { 2952 return false; 2953 } 2954 2955 // iterate over instance fields 2956 ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o); 2957 for (int i=0; i<field_map->field_count(); i++) { 2958 ClassFieldDescriptor* field = field_map->field_at(i); 2959 char type = field->field_type(); 2960 if (!is_primitive_field_type(type)) { 2961 oop fld_o = o->obj_field(field->field_offset()); 2962 // ignore any objects that aren't visible to profiler 2963 if (fld_o != NULL) { 2964 assert(Universe::heap()->is_in(fld_o), "unsafe code should not " 2965 "have references to Klass* anymore"); 2966 int slot = field->field_index(); 2967 if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) { 2968 return false; 2969 } 2970 } 2971 } else { 2972 if (is_reporting_primitive_fields()) { 2973 // primitive instance field 2974 address addr = (address)o + field->field_offset(); 2975 int slot = field->field_index(); 2976 if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) { 2977 return false; 2978 } 2979 } 2980 } 2981 } 2982 2983 // if the object is a java.lang.String 2984 if (is_reporting_string_values() && 2985 o->klass() == SystemDictionary::String_klass()) { 2986 if (!CallbackInvoker::report_string_value(o)) { 2987 return false; 2988 } 2989 } 2990 return true; 2991 } 2992 2993 2994 // Collects all simple (non-stack) roots except for threads; 2995 // threads are handled in collect_stack_roots() as an optimization. 2996 // if there's a heap root callback provided then the callback is 2997 // invoked for each simple root. 
2998 // if an object reference callback is provided then all simple 2999 // roots are pushed onto the marking stack so that they can be 3000 // processed later 3001 // 3002 inline bool VM_HeapWalkOperation::collect_simple_roots() { 3003 SimpleRootsClosure blk; 3004 3005 // JNI globals 3006 blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL); 3007 JNIHandles::oops_do(&blk); 3008 if (blk.stopped()) { 3009 return false; 3010 } 3011 3012 // Preloaded classes and loader from the system dictionary 3013 blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS); 3014 SystemDictionary::oops_do(&blk); 3015 CLDToOopClosure cld_closure(&blk, false); 3016 ClassLoaderDataGraph::always_strong_cld_do(&cld_closure); 3017 if (blk.stopped()) { 3018 return false; 3019 } 3020 3021 // Inflated monitors 3022 blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR); 3023 ObjectSynchronizer::oops_do(&blk); 3024 if (blk.stopped()) { 3025 return false; 3026 } 3027 3028 // threads are now handled in collect_stack_roots() 3029 3030 // Other kinds of roots maintained by HotSpot 3031 // Many of these won't be visible but others (such as instances of important 3032 // exceptions) will be visible. 
3033 blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER); 3034 Universe::oops_do(&blk); 3035 if (blk.stopped()) { 3036 return false; 3037 } 3038 3039 return true; 3040 } 3041 3042 // Walk the stack of a given thread and find all references (locals 3043 // and JNI calls) and report these as stack references 3044 inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread, 3045 JNILocalRootsClosure* blk) 3046 { 3047 oop threadObj = java_thread->threadObj(); 3048 assert(threadObj != NULL, "sanity check"); 3049 3050 // only need to get the thread's tag once per thread 3051 jlong thread_tag = tag_for(_tag_map, threadObj); 3052 3053 // also need the thread id 3054 jlong tid = java_lang_Thread::thread_id(threadObj); 3055 3056 3057 if (java_thread->has_last_Java_frame()) { 3058 3059 // vframes are resource allocated 3060 Thread* current_thread = Thread::current(); 3061 ResourceMark rm(current_thread); 3062 HandleMark hm(current_thread); 3063 3064 RegisterMap reg_map(java_thread); 3065 frame f = java_thread->last_frame(); 3066 vframe* vf = vframe::new_vframe(&f, ®_map, java_thread); 3067 3068 bool is_top_frame = true; 3069 int depth = 0; 3070 frame* last_entry_frame = NULL; 3071 3072 while (vf != NULL) { 3073 if (vf->is_java_frame()) { 3074 3075 // java frame (interpreted, compiled, ...) 
        javaVFrame *jvf = javaVFrame::cast(vf);

        // the jmethodID
        jmethodID method = jvf->method()->jmethod_id();

        if (!(jvf->method()->is_native())) {
          jlocation bci = (jlocation)jvf->bci();
          StackValueCollection* locals = jvf->locals();
          for (int slot=0; slot<locals->size(); slot++) {
            if (locals->at(slot)->type() == T_OBJECT) {
              oop o = locals->obj_at(slot)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                          bci, slot, o)) {
                return false;
              }
            }
          }

          // expression-stack slots are numbered after the locals
          // (bci/locals are still in scope: this is inside the non-native branch)
          StackValueCollection* exprs = jvf->expressions();
          for (int index=0; index < exprs->size(); index++) {
            if (exprs->at(index)->type() == T_OBJECT) {
              oop o = exprs->obj_at(index)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                          bci, locals->size() + index, o)) {
                return false;
              }
            }
          }

          // Follow oops from compiled nmethod
          if (jvf->cb() != NULL && jvf->cb()->is_nmethod()) {
            blk->set_context(thread_tag, tid, depth, method);
            jvf->cb()->as_nmethod()->oops_do(blk);
          }
        } else {
          // native frame: report JNI locals instead of interpreter locals
          blk->set_context(thread_tag, tid, depth, method);
          if (is_top_frame) {
            // JNI locals for the top frame.
            java_thread->active_handles()->oops_do(blk);
          } else {
            if (last_entry_frame != NULL) {
              // JNI locals for the entry frame
              assert(last_entry_frame->is_entry_frame(), "checking");
              last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
            }
          }
        }
        last_entry_frame = NULL;
        depth++;
      } else {
        // externalVFrame - for an entry frame then we report the JNI locals
        // when we find the corresponding javaVFrame
        frame* fr = vf->frame_pointer();
        assert(fr != NULL, "sanity check");
        if (fr->is_entry_frame()) {
          last_entry_frame = fr;
        }
      }

      vf = vf->sender();
      is_top_frame = false;
    }
  } else {
    // no last java frame but there may be JNI locals
    blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
    java_thread->active_handles()->oops_do(blk);
  }
  return true;
}


// Collects the simple roots for all threads and collects all
// stack roots - for each thread it walks the execution
// stack to find all references and local JNI refs.
inline bool VM_HeapWalkOperation::collect_stack_roots() {
  JNILocalRootsClosure blk;
  for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
    oop threadObj = thread->threadObj();
    // skip threads with no Java-visible thread object, exiting threads,
    // and threads hidden from the external view
    if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
      // Collect the simple root for this thread before we
      // collect its stack roots
      if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
                                              threadObj)) {
        return false;
      }
      if (!collect_stack_roots(thread, &blk)) {
        return false;
      }
    }
  }
  return true;
}

// visit an object
// first mark the object as visited
// second get all the outbound references from this object (in other words, all
// the objects referenced by this object).
//
// Dispatches on the object's kind; returns false if a callback asked to
// terminate the heap walk.
bool VM_HeapWalkOperation::visit(oop o) {
  // mark object as visited
  assert(!ObjectMarker::visited(o), "can't visit same object more than once");
  ObjectMarker::mark(o);

  // instance
  if (o->is_instance()) {
    if (o->klass() == SystemDictionary::Class_klass()) {
      if (!java_lang_Class::is_primitive(o)) {
        // a java.lang.Class
        return iterate_over_class(o);
      }
    } else {
      return iterate_over_object(o);
    }
  }

  // object array
  if (o->is_objArray()) {
    return iterate_over_array(o);
  }

  // type array
  if (o->is_typeArray()) {
    return iterate_over_type_array(o);
  }

  return true;
}

// VM operation entry point: walk the heap from the initial object (if any)
// or from the roots, visiting each reachable object at most once.
void VM_HeapWalkOperation::doit() {
  ResourceMark rm;
  ObjectMarkerController marker;  // RAII: sets up/tears down object marking
  ClassFieldMapCacheMark cm;      // RAII: scopes the cached instance-field maps

  assert(visit_stack()->is_empty(), "visit stack must be empty");

  // the heap walk starts with an initial object or the heap roots
  if (initial_object().is_null()) {
    // If either collect_stack_roots() or collect_simple_roots()
    // returns false at this point, then there are no mark bits
    // to reset.
    ObjectMarker::set_needs_reset(false);

    // Calling collect_stack_roots() before collect_simple_roots()
    // can result in a big performance boost for an agent that is
    // focused on analyzing references in the thread stacks.
    if (!collect_stack_roots()) return;

    if (!collect_simple_roots()) return;

    // no early return so enable heap traversal to reset the mark bits
    ObjectMarker::set_needs_reset(true);
  } else {
    visit_stack()->push(initial_object()());
  }

  // object references required
  if (is_following_references()) {

    // visit each object until all reachable objects have been
    // visited or the callback asked to terminate the iteration.
    while (!visit_stack()->is_empty()) {
      oop o = visit_stack()->pop();
      if (!ObjectMarker::visited(o)) {
        if (!visit(o)) {
          break;
        }
      }
    }
  }
}

// iterate over all objects that are reachable from a set of roots
// (basic heap-walk flavor; executed in the VM thread under Heap_lock)
void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
                                                 jvmtiStackReferenceCallback stack_ref_callback,
                                                 jvmtiObjectReferenceCallback object_ref_callback,
                                                 const void* user_data) {
  MutexLocker ml(Heap_lock);
  BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
  VM_HeapWalkOperation op(this, Handle(), context, user_data);
  VMThread::execute(&op);
}

// iterate over all objects that are reachable from a given object
void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
                                                             jvmtiObjectReferenceCallback object_ref_callback,
                                                             const void* user_data) {
  // resolve the handle before taking Heap_lock
  oop obj = JNIHandles::resolve(object);
  Handle initial_object(Thread::current(), obj);

  MutexLocker ml(Heap_lock);
  BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
  VM_HeapWalkOperation op(this, initial_object, context, user_data);
  VMThread::execute(&op);
}

// follow references from an initial object or the GC roots
// (advanced heap-walk flavor used by the JVMTI FollowReferences function)
void JvmtiTagMap::follow_references(jint heap_filter,
                                    Klass* klass,
                                    jobject object,
                                    const jvmtiHeapCallbacks* callbacks,
                                    const void* user_data)
{
  oop obj = JNIHandles::resolve(object);
  Handle initial_object(Thread::current(), obj);

  MutexLocker ml(Heap_lock);
  AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
  VM_HeapWalkOperation op(this, initial_object, context, user_data);
  VMThread::execute(&op);
}


void JvmtiTagMap::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  // No locks during VM bring-up (0 threads) and no safepoints after
  // main thread creation and before VMThread creation (1 thread); initial GC
  // verification can happen in that window which gets to here.
  assert(Threads::number_of_threads() <= 1 ||
         SafepointSynchronize::is_at_safepoint(),
         "must be executed at a safepoint");
  if (JvmtiEnv::environments_might_exist()) {
    JvmtiEnvIterator it;
    for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
      JvmtiTagMap* tag_map = env->tag_map_acquire();
      if (tag_map != NULL && !tag_map->is_empty()) {
        tag_map->do_weak_oops(is_alive, f);
      }
    }
  }
}

// GC support: drop entries whose object died, and re-hash entries whose
// object was moved by the collector.  Called at a safepoint (see
// weak_oops_do above).
void JvmtiTagMap::do_weak_oops(BoolObjectClosure* is_alive, OopClosure* f) {

  // does this environment have the OBJECT_FREE event enabled
  bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);

  // counters used for trace message
  int freed = 0;
  int moved = 0;

  JvmtiTagHashmap* hashmap = this->hashmap();

  // reenable sizing (if disabled)
  hashmap->set_resizing_enabled(true);

  // if the hashmap is empty then we can skip it
  if (hashmap->_entry_count == 0) {
    return;
  }

  // now iterate through each entry in the table

  JvmtiTagHashmapEntry** table = hashmap->table();
  int size = hashmap->size();

  // entries that move to a later bucket are parked here so this pass
  // does not encounter them a second time
  JvmtiTagHashmapEntry* delayed_add = NULL;

  for (int pos = 0; pos < size; ++pos) {
    JvmtiTagHashmapEntry* entry = table[pos];
    JvmtiTagHashmapEntry* prev = NULL;

    while (entry != NULL) {
      JvmtiTagHashmapEntry* next = entry->next();

      // has object been GC'ed
      if (!is_alive->do_object_b(entry->object_peek())) {
        // grab the tag (before the entry is destroyed below)
        jlong tag = entry->tag();
        guarantee(tag != 0, "checking");

        // remove GC'ed entry from hashmap and return the
        // entry to the free list
        hashmap->remove(prev, pos, entry);
        destroy_entry(entry);

        // post the event to the profiler
        if (post_object_free) {
          JvmtiExport::post_object_free(env(), tag);
        }

        ++freed;
      } else {
        // object is alive; let the closure update the (possibly moved) oop
        f->do_oop(entry->object_addr());
        oop new_oop = entry->object_peek();

        // if the object has moved then re-hash it and move its
        // entry to its new location.
        unsigned int new_pos = JvmtiTagHashmap::hash(new_oop, size);
        if (new_pos != (unsigned int)pos) {
          if (prev == NULL) {
            table[pos] = next;
          } else {
            prev->set_next(next);
          }
          if (new_pos < (unsigned int)pos) {
            // earlier bucket: already scanned, safe to insert directly
            entry->set_next(table[new_pos]);
            table[new_pos] = entry;
          } else {
            // Delay adding this entry to its new position as we'd end up
            // hitting it again during this iteration.
            entry->set_next(delayed_add);
            delayed_add = entry;
          }
          moved++;
        } else {
          // object didn't move
          prev = entry;
        }
      }

      entry = next;
    }
  }

  // Re-add all the entries which were kept aside
  while (delayed_add != NULL) {
    JvmtiTagHashmapEntry* next = delayed_add->next();
    unsigned int pos = JvmtiTagHashmap::hash(delayed_add->object_peek(), size);
    delayed_add->set_next(table[pos]);
    table[pos] = delayed_add;
    delayed_add = next;
  }

  log_debug(jvmti, objecttagging)("(%d->%d, %d freed, %d total moves)",
                                  hashmap->_entry_count + freed, hashmap->_entry_count, freed, moved);
}