1 /* 2 * Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "classfile/javaClasses.inline.hpp" 27 #include "classfile/symbolTable.hpp" 28 #include "classfile/systemDictionary.hpp" 29 #include "classfile/vmSymbols.hpp" 30 #include "code/codeCache.hpp" 31 #include "jvmtifiles/jvmtiEnv.hpp" 32 #include "memory/resourceArea.hpp" 33 #include "oops/instanceMirrorKlass.hpp" 34 #include "oops/objArrayKlass.hpp" 35 #include "oops/objArrayOop.inline.hpp" 36 #include "oops/oop.inline.hpp" 37 #include "prims/jvmtiEventController.hpp" 38 #include "prims/jvmtiEventController.inline.hpp" 39 #include "prims/jvmtiExport.hpp" 40 #include "prims/jvmtiImpl.hpp" 41 #include "prims/jvmtiTagMap.hpp" 42 #include "runtime/biasedLocking.hpp" 43 #include "runtime/javaCalls.hpp" 44 #include "runtime/jniHandles.hpp" 45 #include "runtime/mutex.hpp" 46 #include "runtime/mutexLocker.hpp" 47 #include "runtime/reflectionUtils.hpp" 48 #include "runtime/vframe.hpp" 49 #include "runtime/vmThread.hpp" 50 #include "runtime/vm_operations.hpp" 51 #include "services/serviceUtil.hpp" 52 #include "utilities/macros.hpp" 53 #if INCLUDE_ALL_GCS 54 #include "gc/parallel/parallelScavengeHeap.hpp" 55 #endif // INCLUDE_ALL_GCS 56 57 // JvmtiTagHashmapEntry 58 // 59 // Each entry encapsulates a reference to the tagged object 60 // and the tag value. In addition an entry includes a next pointer which 61 // is used to chain entries together. 
// JvmtiTagHashmapEntry
//
// One node of a hashmap bucket chain: holds the tagged object, its tag
// value, and the link to the next entry in the same bucket. Entries are
// C-heap allocated and recycled through JvmtiTagMap's free list (which
// re-initializes them via init()).
class JvmtiTagHashmapEntry : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;   // JvmtiTagMap manages entry lifetime and may touch privates

  oop _object;                          // tagged object
  jlong _tag;                           // the tag
  JvmtiTagHashmapEntry* _next;          // next on the list

  // (re-)initialize this entry; used both by the constructor and when an
  // entry is recycled from the free list
  inline void init(oop object, jlong tag) {
    _object = object;
    _tag = tag;
    _next = NULL;
  }

  // constructor
  JvmtiTagHashmapEntry(oop object, jlong tag) { init(object, tag); }

 public:

  // accessor methods
  inline oop object() const                           { return _object; }
  inline oop* object_addr()                           { return &_object; }
  inline jlong tag() const                            { return _tag; }

  // a tag of 0 means "untagged"; untagging is done by removing the entry,
  // never by storing 0 here
  inline void set_tag(jlong tag) {
    assert(tag != 0, "can't be zero");
    _tag = tag;
  }

  inline JvmtiTagHashmapEntry* next() const           { return _next; }
  inline void set_next(JvmtiTagHashmapEntry* next)    { _next = next; }
};


// JvmtiTagHashmap
//
// A hashmap is essentially a table of pointers to entries. Entries
// are hashed to a location, or position in the table, and then
// chained from that location. The "key" for hashing is address of
// the object, or oop. The "value" is the tag value.
//
// A hashmap maintains a count of the number entries in the hashmap
// and resizes if the number of entries exceeds a given threshold.
// The threshold is specified as a percentage of the size - for
// example a threshold of 0.75 will trigger the hashmap to resize
// if the number of entries is >75% of table size.
//
// A hashmap provides functions for adding, removing, and finding
// entries. It also provides a function to iterate over all entries
// in the hashmap.
class JvmtiTagHashmap : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;

  // entry-count milestones at which memory usage is logged when
  // -Xlog:jvmti+objecttagging=debug is enabled
  enum {
    small_trace_threshold  = 10000,                  // threshold for tracing
    medium_trace_threshold = 100000,
    large_trace_threshold  = 1000000,
    initial_trace_threshold = small_trace_threshold
  };

  static int _sizes[];                  // array of possible hashmap sizes (-1 terminated)
  int _size;                            // actual size of the table
  int _size_index;                      // index into size table

  int _entry_count;                     // number of entries in the hashmap

  float _load_factor;                   // load factor as a % of the size
  int _resize_threshold;                // computed threshold to trigger resizing.
  bool _resizing_enabled;               // indicates if hashmap can resize

  int _trace_threshold;                 // threshold for trace messages

  JvmtiTagHashmapEntry** _table;        // the table of entries.

  // private accessors
  int resize_threshold() const                  { return _resize_threshold; }
  int trace_threshold() const                   { return _trace_threshold; }

  // initialize the hashmap: pick the table size from _sizes[size_index],
  // allocate a NULL-ed bucket array, and set the resize threshold to
  // load_factor * size. Exits the VM if the initial allocation fails.
  void init(int size_index=0, float load_factor=4.0f) {
    int initial_size = _sizes[size_index];
    _size_index = size_index;
    _size = initial_size;
    _entry_count = 0;
    _trace_threshold = initial_trace_threshold;
    _load_factor = load_factor;
    _resize_threshold = (int)(_load_factor * _size);
    _resizing_enabled = true;
    size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
    _table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (_table == NULL) {
      vm_exit_out_of_memory(s, OOM_MALLOC_ERROR,
        "unable to allocate initial hashtable for jvmti object tags");
    }
    for (int i=0; i<initial_size; i++) {
      _table[i] = NULL;
    }
  }

  // hash a given key (oop) with the specified size
  static unsigned int hash(oop key, int size) {
    // shift right to get better distribution (as these bits will be zero
    // with aligned addresses)
    unsigned int addr = (unsigned int)(cast_from_oop<intptr_t>(key));
#ifdef _LP64
    return (addr >> 3) % size;
#else
    return (addr >> 2) % size;
#endif
  }

  // hash a given key (oop)
  unsigned int hash(oop key) {
    return hash(key, _size);
  }

  // resize the hashmap - allocates a large table and re-hashes
  // all entries into the new table. Entries are relinked in place (no
  // per-entry allocation); on allocation failure resizing is disabled
  // and the current table is kept.
  void resize() {
    int new_size_index = _size_index+1;
    int new_size = _sizes[new_size_index];
    if (new_size < 0) {
      // hashmap already at maximum capacity
      return;
    }

    // allocate new table
    size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
    JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (new_table == NULL) {
      warning("unable to allocate larger hashtable for jvmti object tags");
      set_resizing_enabled(false);
      return;
    }

    // initialize new table
    int i;
    for (i=0; i<new_size; i++) {
      new_table[i] = NULL;
    }

    // rehash all entries into the new table (each entry is pushed onto the
    // head of its new bucket chain)
    for (i=0; i<_size; i++) {
      JvmtiTagHashmapEntry* entry = _table[i];
      while (entry != NULL) {
        JvmtiTagHashmapEntry* next = entry->next();
        oop key = entry->object();
        assert(key != NULL, "jni weak reference cleared!!");
        unsigned int h = hash(key, new_size);
        JvmtiTagHashmapEntry* anchor = new_table[h];
        if (anchor == NULL) {
          new_table[h] = entry;
          entry->set_next(NULL);
        } else {
          entry->set_next(anchor);
          new_table[h] = entry;
        }
        entry = next;
      }
    }

    // free old table and update settings.
    os::free((void*)_table);
    _table = new_table;
    _size_index = new_size_index;
    _size = new_size;

    // compute new resize threshold
    _resize_threshold = (int)(_load_factor * _size);
  }


  // internal remove function - remove an entry at a given position in the
  // table. The caller supplies the entry's predecessor in the bucket chain
  // (prev == NULL when the entry is the bucket head). The entry itself is
  // unlinked, not freed.
  inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
    assert(pos >= 0 && pos < _size, "out of range");
    if (prev == NULL) {
      _table[pos] = entry->next();
    } else {
      prev->set_next(entry->next());
    }
    assert(_entry_count > 0, "checking");
    _entry_count--;
  }

  // resizing switch
  bool is_resizing_enabled() const { return _resizing_enabled; }
  void set_resizing_enabled(bool enable) { _resizing_enabled = enable; }

  // debugging
  void print_memory_usage();
  void compute_next_trace_threshold();

 public:

  // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
  // The preferred size is rounded down to an actual size.
  JvmtiTagHashmap(int size, float load_factor=0.0f) {
    int i=0;
    while (_sizes[i] < size) {
      if (_sizes[i] < 0) {
        // hit the -1 terminator - use the largest available size
        assert(i > 0, "sanity check");
        i--;
        break;
      }
      i++;
    }

    // if a load factor is specified then use it, otherwise use default
    if (load_factor > 0.01f) {
      init(i, load_factor);
    } else {
      init(i);
    }
  }

  // create a JvmtiTagHashmap with default settings
  JvmtiTagHashmap() {
    init();
  }

  // release table when JvmtiTagHashmap destroyed
  ~JvmtiTagHashmap() {
    if (_table != NULL) {
      os::free((void*)_table);
      _table = NULL;
    }
  }

  // accessors
  int size() const { return _size; }
  JvmtiTagHashmapEntry** table() const { return _table; }
  int entry_count() const { return _entry_count; }

  // find an entry in the hashmap, returns NULL if not found.
  inline JvmtiTagHashmapEntry* find(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    while (entry != NULL) {
      if (entry->object() == key) {
        return entry;
      }
      entry = entry->next();
    }
    return NULL;
  }


  // add a new entry to hashmap. The key must not already be present; the
  // entry is pushed onto the head of its bucket chain. May trigger a
  // resize when the entry count exceeds the resize threshold.
  inline void add(oop key, JvmtiTagHashmapEntry* entry) {
    assert(key != NULL, "checking");
    assert(find(key) == NULL, "duplicate detected");
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* anchor = _table[h];
    if (anchor == NULL) {
      _table[h] = entry;
      entry->set_next(NULL);
    } else {
      entry->set_next(anchor);
      _table[h] = entry;
    }

    _entry_count++;
    if (log_is_enabled(Debug, jvmti, objecttagging) && entry_count() >= trace_threshold()) {
      print_memory_usage();
      compute_next_trace_threshold();
    }

    // if the number of entries exceed the threshold then resize
    if (entry_count() > resize_threshold() && is_resizing_enabled()) {
      resize();
    }
  }

  // remove an entry with the given key. Returns the unlinked entry (for
  // the caller to free or recycle), or NULL if the key was not present.
  inline JvmtiTagHashmapEntry* remove(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    JvmtiTagHashmapEntry* prev = NULL;
    while (entry != NULL) {
      if (key == entry->object()) {
        break;
      }
      prev = entry;
      entry = entry->next();
    }
    if (entry != NULL) {
      remove(prev, h, entry);
    }
    return entry;
  }

  // iterate over all entries in the hashmap
  void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
};

// possible hashmap sizes - odd primes that roughly double in size.
// To avoid excessive resizing the odd primes from 4801-76831 and
// 76831-307261 have been removed. The list must be terminated by -1.
364 int JvmtiTagHashmap::_sizes[] = { 4801, 76831, 307261, 614563, 1228891, 365 2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 }; 366 367 368 // A supporting class for iterating over all entries in Hashmap 369 class JvmtiTagHashmapEntryClosure { 370 public: 371 virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0; 372 }; 373 374 375 // iterate over all entries in the hashmap 376 void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) { 377 for (int i=0; i<_size; i++) { 378 JvmtiTagHashmapEntry* entry = _table[i]; 379 JvmtiTagHashmapEntry* prev = NULL; 380 while (entry != NULL) { 381 // obtain the next entry before invoking do_entry - this is 382 // necessary because do_entry may remove the entry from the 383 // hashmap. 384 JvmtiTagHashmapEntry* next = entry->next(); 385 closure->do_entry(entry); 386 entry = next; 387 } 388 } 389 } 390 391 // debugging 392 void JvmtiTagHashmap::print_memory_usage() { 393 intptr_t p = (intptr_t)this; 394 tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p); 395 396 // table + entries in KB 397 int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) + 398 entry_count()*sizeof(JvmtiTagHashmapEntry))/K; 399 400 int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K); 401 tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]", 402 entry_count(), hashmap_usage, weak_globals_usage); 403 } 404 405 // compute threshold for the next trace message 406 void JvmtiTagHashmap::compute_next_trace_threshold() { 407 _trace_threshold = entry_count(); 408 if (trace_threshold() < medium_trace_threshold) { 409 _trace_threshold += small_trace_threshold; 410 } else { 411 if (trace_threshold() < large_trace_threshold) { 412 _trace_threshold += medium_trace_threshold; 413 } else { 414 _trace_threshold += large_trace_threshold; 415 } 416 } 417 } 418 419 // create a JvmtiTagMap 420 JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) : 421 _env(env), 422 _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false), 
423 _free_entries(NULL), 424 _free_entries_count(0) 425 { 426 assert(JvmtiThreadState_lock->is_locked(), "sanity check"); 427 assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment"); 428 429 _hashmap = new JvmtiTagHashmap(); 430 431 // finally add us to the environment 432 ((JvmtiEnvBase *)env)->set_tag_map(this); 433 } 434 435 436 // destroy a JvmtiTagMap 437 JvmtiTagMap::~JvmtiTagMap() { 438 439 // no lock acquired as we assume the enclosing environment is 440 // also being destroryed. 441 ((JvmtiEnvBase *)_env)->set_tag_map(NULL); 442 443 JvmtiTagHashmapEntry** table = _hashmap->table(); 444 for (int j = 0; j < _hashmap->size(); j++) { 445 JvmtiTagHashmapEntry* entry = table[j]; 446 while (entry != NULL) { 447 JvmtiTagHashmapEntry* next = entry->next(); 448 delete entry; 449 entry = next; 450 } 451 } 452 453 // finally destroy the hashmap 454 delete _hashmap; 455 _hashmap = NULL; 456 457 // remove any entries on the free list 458 JvmtiTagHashmapEntry* entry = _free_entries; 459 while (entry != NULL) { 460 JvmtiTagHashmapEntry* next = entry->next(); 461 delete entry; 462 entry = next; 463 } 464 _free_entries = NULL; 465 } 466 467 // create a hashmap entry 468 // - if there's an entry on the (per-environment) free list then this 469 // is returned. Otherwise an new entry is allocated. 
470 JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(oop ref, jlong tag) { 471 assert(Thread::current()->is_VM_thread() || is_locked(), "checking"); 472 JvmtiTagHashmapEntry* entry; 473 if (_free_entries == NULL) { 474 entry = new JvmtiTagHashmapEntry(ref, tag); 475 } else { 476 assert(_free_entries_count > 0, "mismatched _free_entries_count"); 477 _free_entries_count--; 478 entry = _free_entries; 479 _free_entries = entry->next(); 480 entry->init(ref, tag); 481 } 482 return entry; 483 } 484 485 // destroy an entry by returning it to the free list 486 void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) { 487 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking"); 488 // limit the size of the free list 489 if (_free_entries_count >= max_free_entries) { 490 delete entry; 491 } else { 492 entry->set_next(_free_entries); 493 _free_entries = entry; 494 _free_entries_count++; 495 } 496 } 497 498 // returns the tag map for the given environments. If the tag map 499 // doesn't exist then it is created. 500 JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) { 501 JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map(); 502 if (tag_map == NULL) { 503 MutexLocker mu(JvmtiThreadState_lock); 504 tag_map = ((JvmtiEnvBase*)env)->tag_map(); 505 if (tag_map == NULL) { 506 tag_map = new JvmtiTagMap(env); 507 } 508 } else { 509 CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops()); 510 } 511 return tag_map; 512 } 513 514 // iterate over all entries in the tag map. 
515 void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) { 516 hashmap()->entry_iterate(closure); 517 } 518 519 // returns true if the hashmaps are empty 520 bool JvmtiTagMap::is_empty() { 521 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking"); 522 return hashmap()->entry_count() == 0; 523 } 524 525 526 // Return the tag value for an object, or 0 if the object is 527 // not tagged 528 // 529 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) { 530 JvmtiTagHashmapEntry* entry = tag_map->hashmap()->find(o); 531 if (entry == NULL) { 532 return 0; 533 } else { 534 return entry->tag(); 535 } 536 } 537 538 539 // A CallbackWrapper is a support class for querying and tagging an object 540 // around a callback to a profiler. The constructor does pre-callback 541 // work to get the tag value, klass tag value, ... and the destructor 542 // does the post-callback work of tagging or untagging the object. 543 // 544 // { 545 // CallbackWrapper wrapper(tag_map, o); 546 // 547 // (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...) 548 // 549 // } // wrapper goes out of scope here which results in the destructor 550 // checking to see if the object has been tagged, untagged, or the 551 // tag value has changed. 
//
class CallbackWrapper : public StackObj {
 private:
  JvmtiTagMap* _tag_map;              // tag map the object belongs to
  JvmtiTagHashmap* _hashmap;          // its underlying hashmap
  JvmtiTagHashmapEntry* _entry;       // entry for _o at construction time (or NULL)
  oop _o;                             // the object being reported
  jlong _obj_size;                    // object size in bytes
  jlong _obj_tag;                     // tag at construction; callback may change via obj_tag_p()
  jlong _klass_tag;                   // tag of the object's class mirror

 protected:
  JvmtiTagMap* tag_map() const      { return _tag_map; }

  // invoked post-callback to tag, untag, or update the tag of an object
  void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
                                       JvmtiTagHashmapEntry* entry, jlong obj_tag);
 public:
  CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
    assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
           "MT unsafe or must be VM thread");

    // object to tag
    _o = o;

    // object size
    _obj_size = (jlong)_o->size() * wordSize;

    // record the context
    _tag_map = tag_map;
    _hashmap = tag_map->hashmap();
    _entry = _hashmap->find(_o);

    // get object tag
    _obj_tag = (_entry == NULL) ? 0 : _entry->tag();

    // get the class and the class's tag value
    assert(SystemDictionary::Class_klass()->is_mirror_instance_klass(), "Is not?");

    _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
  }

  ~CallbackWrapper() {
    // apply whatever tag change the callback made through obj_tag_p()
    post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
  }

  inline jlong* obj_tag_p()                     { return &_obj_tag; }
  inline jlong obj_size() const                 { return _obj_size; }
  inline jlong obj_tag() const                  { return _obj_tag; }
  inline jlong klass_tag() const                { return _klass_tag; }
};



// callback post-callback to tag, untag, or update the tag of an object
void inline CallbackWrapper::post_callback_tag_update(oop o,
                                                      JvmtiTagHashmap* hashmap,
                                                      JvmtiTagHashmapEntry* entry,
                                                      jlong obj_tag) {
  if (entry == NULL) {
    if (obj_tag != 0) {
      // callback has tagged the object
      assert(Thread::current()->is_VM_thread(), "must be VMThread");
      entry = tag_map()->create_entry(o, obj_tag);
      hashmap->add(o, entry);
    }
  } else {
    // object was previously tagged - the callback may have untagged
    // the object or changed the tag value
    if (obj_tag == 0) {

      // tag cleared to 0 - remove the entry and recycle it
      JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
      assert(entry_removed == entry, "checking");
      tag_map()->destroy_entry(entry);

    } else {
      if (obj_tag != entry->tag()) {
         entry->set_tag(obj_tag);
      }
    }
  }
}

// An extended CallbackWrapper used when reporting an object reference
// to the agent.
//
// {
//   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
//
//   (*callback)(wrapper.klass_tag(),
//               wrapper.obj_size(),
//               wrapper.obj_tag_p()
//               wrapper.referrer_tag_p(), ...)
//
// } // wrapper goes out of scope here which results in the destructor
//   // checking to see if the referrer object has been tagged, untagged,
//   // or the tag value has changed.
//
class TwoOopCallbackWrapper : public CallbackWrapper {
 private:
  bool _is_reference_to_self;               // true when referrer == o
  JvmtiTagHashmap* _referrer_hashmap;
  JvmtiTagHashmapEntry* _referrer_entry;
  oop _referrer;
  jlong _referrer_obj_tag;
  jlong _referrer_klass_tag;
  jlong* _referrer_tag_p;                   // aliases obj_tag_p() for self references

  bool is_reference_to_self() const             { return _is_reference_to_self; }

 public:
  TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
    CallbackWrapper(tag_map, o)
  {
    // self reference needs to be handled in a special way: the base class
    // already tracks the object, so share its tag storage instead of
    // looking the object up a second time
    _is_reference_to_self = (referrer == o);

    if (_is_reference_to_self) {
      _referrer_klass_tag = klass_tag();
      _referrer_tag_p = obj_tag_p();
    } else {
      _referrer = referrer;
      // record the context
      _referrer_hashmap = tag_map->hashmap();
      _referrer_entry = _referrer_hashmap->find(_referrer);

      // get object tag
      _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
      _referrer_tag_p = &_referrer_obj_tag;

      // get referrer class tag.
      _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
    }
  }

  ~TwoOopCallbackWrapper() {
    // for self references the base class destructor handles the update
    if (!is_reference_to_self()){
      post_callback_tag_update(_referrer,
                               _referrer_hashmap,
                               _referrer_entry,
                               _referrer_obj_tag);
    }
  }

  // address of referrer tag
  // (for a self reference this will return the same thing as obj_tag_p())
  inline jlong* referrer_tag_p() { return _referrer_tag_p; }

  // referrer's class tag
  inline jlong referrer_klass_tag() { return _referrer_klass_tag; }
};

// tag an object
//
// This function is performance critical. If many threads attempt to tag objects
// around the same time then it's possible that the Mutex associated with the
// tag map will be a hot lock.
void JvmtiTagMap::set_tag(jobject object, jlong tag) {
  MutexLocker ml(lock());

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  // see if the object is already tagged
  JvmtiTagHashmap* hashmap = _hashmap;
  JvmtiTagHashmapEntry* entry = hashmap->find(o);

  // if the object is not already tagged then we tag it
  if (entry == NULL) {
    if (tag != 0) {
      entry = create_entry(o, tag);
      hashmap->add(o, entry);
    } else {
      // no-op
    }
  } else {
    // if the object is already tagged then we either update
    // the tag (if a new tag value has been provided)
    // or remove the object if the new tag value is 0.
    if (tag == 0) {
      hashmap->remove(o);
      destroy_entry(entry);
    } else {
      entry->set_tag(tag);
    }
  }
}

// get the tag for an object
jlong JvmtiTagMap::get_tag(jobject object) {
  MutexLocker ml(lock());

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  return tag_for(this, o);
}


// Helper class used to describe the static or instance fields of a class.
// For each field it holds the field index (as defined by the JVMTI specification),
// the field type, and the offset.

class ClassFieldDescriptor: public CHeapObj<mtInternal> {
 private:
  int _field_index;     // JVMTI field index
  int _field_offset;    // offset of the field within the object/mirror
  char _field_type;     // first char of the field signature ('I', 'L', '[', ...)
 public:
  ClassFieldDescriptor(int index, char type, int offset) :
    _field_index(index), _field_type(type), _field_offset(offset) {
  }
  int field_index()  const  { return _field_index; }
  char field_type()  const  { return _field_type; }
  int field_offset() const  { return _field_offset; }
};

class ClassFieldMap: public CHeapObj<mtInternal> {
 private:
  enum {
    initial_field_count = 5   // initial GrowableArray capacity
  };

  // list of field descriptors
  GrowableArray<ClassFieldDescriptor*>* _fields;

  // constructor
  ClassFieldMap();

  // add a field
  void add(int index, char type, int offset);

  // returns the field count for the given class
  static int compute_field_count(InstanceKlass* ik);

 public:
  ~ClassFieldMap();

  // access
  int field_count()                     { return _fields->length(); }
  ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }

  // functions to create maps of static or instance fields
  static ClassFieldMap* create_map_of_static_fields(Klass* k);
  static ClassFieldMap* create_map_of_instance_fields(oop obj);
};

ClassFieldMap::ClassFieldMap() {
  _fields = new (ResourceObj::C_HEAP, mtInternal)
    GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
}

ClassFieldMap::~ClassFieldMap() {
  // the map owns its descriptors
  for (int i=0; i<_fields->length(); i++) {
    delete _fields->at(i);
  }
  delete _fields;
}

void ClassFieldMap::add(int index, char type, int offset) {
  ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
  _fields->append(field);
}

// Returns a heap allocated ClassFieldMap to describe the static fields
// of the given class.
//
ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
  HandleMark hm;
  InstanceKlass* ik = InstanceKlass::cast(k);

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  // field indices are numbered from max_field_index downwards; the count
  // comes from a separate stream over the same class
  FilteredFieldStream f(ik, false, false);
  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, true, true); !fld.eos(); fld.next(), index++) {
    // ignore instance fields
    if (!fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
  }
  return field_map;
}

// Returns a heap allocated ClassFieldMap to describe the instance fields
// of the given class. All instance fields are included (this means public
// and private fields declared in superclasses and superinterfaces too).
//
ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
  HandleMark hm;
  InstanceKlass* ik = InstanceKlass::cast(obj->klass());

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  FilteredFieldStream f(ik, false, false);

  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, false, false); !fld.eos(); fld.next(), index++) {
    // ignore static fields
    if (fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
  }

  return field_map;
}

// Helper class used to cache a ClassFieldMap for the instance fields of
// a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
// heap iteration and avoid creating a field map for each object in the heap
// (only need to create the map when the first instance of a class is encountered).
871 // 872 class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> { 873 private: 874 enum { 875 initial_class_count = 200 876 }; 877 ClassFieldMap* _field_map; 878 879 ClassFieldMap* field_map() const { return _field_map; } 880 881 JvmtiCachedClassFieldMap(ClassFieldMap* field_map); 882 ~JvmtiCachedClassFieldMap(); 883 884 static GrowableArray<InstanceKlass*>* _class_list; 885 static void add_to_class_list(InstanceKlass* ik); 886 887 public: 888 // returns the field map for a given object (returning map cached 889 // by InstanceKlass if possible 890 static ClassFieldMap* get_map_of_instance_fields(oop obj); 891 892 // removes the field map from all instanceKlasses - should be 893 // called before VM operation completes 894 static void clear_cache(); 895 896 // returns the number of ClassFieldMap cached by instanceKlasses 897 static int cached_field_map_count(); 898 }; 899 900 GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list; 901 902 JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) { 903 _field_map = field_map; 904 } 905 906 JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() { 907 if (_field_map != NULL) { 908 delete _field_map; 909 } 910 } 911 912 // Marker class to ensure that the class file map cache is only used in a defined 913 // scope. 
class ClassFieldMapCacheMark : public StackObj {
 private:
   static bool _is_active;           // true while a mark is in scope
 public:
   // activation requires the VM thread and an empty cache; marks may not nest
   ClassFieldMapCacheMark() {
     assert(Thread::current()->is_VM_thread(), "must be VMThread");
     assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
     assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
     _is_active = true;
   }
   // leaving the scope drops every cached field map
   ~ClassFieldMapCacheMark() {
     JvmtiCachedClassFieldMap::clear_cache();
     _is_active = false;
   }
   static bool is_active() { return _is_active; }
};

bool ClassFieldMapCacheMark::_is_active;


// record that the given InstanceKlass is caching a field map
void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
  if (_class_list == NULL) {
    // lazily create the class list on first use
    _class_list = new (ResourceObj::C_HEAP, mtInternal)
      GrowableArray<InstanceKlass*>(initial_class_count, true);
  }
  _class_list->push(ik);
}

// returns the instance field map for the given object
// (returns field map cached by the InstanceKlass if possible)
ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");

  Klass* k = obj->klass();
  InstanceKlass* ik = InstanceKlass::cast(k);

  // return cached map if possible
  JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
  if (cached_map != NULL) {
    assert(cached_map->field_map() != NULL, "missing field list");
    return cached_map->field_map();
  } else {
    // first instance of this class seen - build the map, cache it on the
    // klass and remember the klass so clear_cache() can find it later
    ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
    cached_map = new JvmtiCachedClassFieldMap(field_map);
    ik->set_jvmti_cached_class_field_map(cached_map);
    add_to_class_list(ik);
    return field_map;
  }
}

// remove the fields maps cached from all instanceKlasses
void JvmtiCachedClassFieldMap::clear_cache() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  if (_class_list != NULL) {
    for (int i = 0; i < _class_list->length(); i++) {
      InstanceKlass* ik = _class_list->at(i);
      JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
      assert(cached_map != NULL, "should not be NULL");
      ik->set_jvmti_cached_class_field_map(NULL);
      delete cached_map;  // deletes the encapsulated field map
    }
    delete _class_list;
    _class_list = NULL;
  }
}

// returns the number of ClassFieldMap cached by instanceKlasses
int JvmtiCachedClassFieldMap::cached_field_map_count() {
  return (_class_list == NULL) ? 0 : _class_list->length();
}

// helper function to indicate if an object is filtered by its tag or class tag
static inline bool is_filtered_by_heap_filter(jlong obj_tag,
                                              jlong klass_tag,
                                              int heap_filter) {
  // apply the heap filter
  if (obj_tag != 0) {
    // filter out tagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
  } else {
    // filter out untagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
  }
  if (klass_tag != 0) {
    // filter out objects with tagged classes
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
  } else {
    // filter out objects with untagged classes.
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
  }
  return false;
}

// helper function to indicate if an object is filtered by a klass filter
static inline bool is_filtered_by_klass_filter(oop obj, Klass* klass_filter) {
  // NULL filter means "no filtering"; the filter matches exact klass only
  if (klass_filter != NULL) {
    if (obj->klass() != klass_filter) {
      return true;
    }
  }
  return false;
}

// helper function to tell if a field is a primitive field or not
// ('L' = object reference, '[' = array; everything else is primitive)
static inline bool is_primitive_field_type(char type) {
  return (type != 'L' && type != '[');
}

// helper function to copy the value from location addr to jvalue.
static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
  switch (value_type) {
    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
    case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
    default: ShouldNotReachHere();
  }
}

// helper function to invoke string primitive value callback
// returns visit control flags
static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
                                         CallbackWrapper* wrapper,
                                         oop str,
                                         void* user_data)
{
  assert(str->klass() == SystemDictionary::String_klass(), "not a string");

  typeArrayOop s_value = java_lang_String::value(str);

  // JDK-6584008: the value field may be null if a String instance is
  // partially constructed.
  if (s_value == NULL) {
    return 0;
  }
  // get the string value and length
  // (string value may be offset from the base)
  int s_len = java_lang_String::length(str);
  bool is_latin1 = java_lang_String::is_latin1(str);
  jchar* value;
  if (s_len > 0) {
    if (!is_latin1) {
      value = s_value->char_at_addr(0);
    } else {
      // Inflate latin1 encoded string to UTF16: widen each byte,
      // masking so the latin1 byte is treated as unsigned
      jchar* buf = NEW_C_HEAP_ARRAY(jchar, s_len, mtInternal);
      for (int i = 0; i < s_len; i++) {
        buf[i] = ((jchar) s_value->byte_at(i)) & 0xff;
      }
      value = &buf[0];
    }
  } else {
    // Don't use char_at_addr(0) if length is 0
    value = (jchar*) s_value->base(T_CHAR);
  }

  // invoke the callback
  jint res = (*cb)(wrapper->klass_tag(),
                   wrapper->obj_size(),
                   wrapper->obj_tag_p(),
                   value,
                   (jint)s_len,
                   user_data);

  // free the temporary inflation buffer, if one was allocated
  if (is_latin1 && s_len > 0) {
    FREE_C_HEAP_ARRAY(jchar, value);
  }
  return res;
}

// helper function to invoke array primitive value callback
// returns visit control flags
static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
                                                  CallbackWrapper* wrapper,
                                                  oop obj,
                                                  void* user_data)
{
  assert(obj->is_typeArray(), "not a primitive array");

  // get base address of first element
  typeArrayOop array = typeArrayOop(obj);
  BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
  void* elements = array->base(type);

  // jvmtiPrimitiveType is defined so this mapping is always correct
  jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);

  return (*cb)(wrapper->klass_tag(),
               wrapper->obj_size(),
               wrapper->obj_tag_p(),
               (jint)array->length(),
               elem_type,
               elements,
               user_data);
}

// helper function to invoke the primitive field callback for all static fields
// of a given class
static
jint invoke_primitive_field_callback_for_static_fields
  (CallbackWrapper* wrapper,
   oop obj,
   jvmtiPrimitiveFieldCallback cb,
   void* user_data)
{
  // for static fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  // obj is the java.lang.Class mirror of the class whose statics we report
  assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
  if (java_lang_Class::is_primitive(obj)) {
    return 0;
  }
  Klass* klass = java_lang_Class::as_Klass(obj);

  // ignore classes for object and type arrays
  if (!klass->is_instance_klass()) {
    return 0;
  }

  // ignore classes which aren't linked yet
  InstanceKlass* ik = InstanceKlass::cast(klass);
  if (!ik->is_linked()) {
    return 0;
  }

  // get the field map
  ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);

  // invoke the callback for each static primitive field
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value - statics live in the java mirror
    int offset = field->field_offset();
    address addr = (address)klass->java_mirror() + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      // agent asked to abort; field_map is heap allocated so free it first
      delete field_map;
      return res;
    }
  }

  // free the field_map
  delete field_map;
  return 0;
}

// helper function to invoke the primitive field callback for all instance fields
// of a given object
static jint invoke_primitive_field_callback_for_instance_fields(
  CallbackWrapper* wrapper,
  oop obj,
  jvmtiPrimitiveFieldCallback cb,
  void* user_data)
{
  // for instance fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  // get the map of the instance fields (cached per-InstanceKlass while a
  // ClassFieldMapCacheMark is active, so do not delete it here)
  ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);

  // invoke the callback for each instance primitive field
  for (int i=0; i<fields->field_count(); i++) {
    ClassFieldDescriptor* field = fields->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value - instance fields live in the object itself
    int offset = field->field_offset();
    address addr = (address)obj + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      return res;
    }
  }
  return 0;
}


// VM operation to iterate over all objects in the heap (both reachable
// and unreachable)
class VM_HeapIterateOperation: public VM_Operation {
 private:
  ObjectClosure* _blk;
 public:
  VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }

  VMOp_Type type() const { return VMOp_HeapIterateOperation; }
  void doit() {
    // allows class files maps to be cached during iteration
    ClassFieldMapCacheMark cm;

    // make sure that heap is parsable (fills TLABs with filler objects)
    Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

    // Verify heap before iteration - if the heap gets corrupted then
    // JVMTI's IterateOverHeap will crash.
    if (VerifyBeforeIteration) {
      Universe::verify();
    }

    // do the iteration
    // If this operation encounters a bad object when using CMS,
    // consider using safe_object_iterate() which avoids perm gen
    // objects that may contain bad references.
    Universe::heap()->object_iterate(_blk);
  }

};


// An ObjectClosure used to support the deprecated IterateOverHeap and
// IterateOverInstancesOfClass functions
class IterateOverHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;
  Klass* _klass;                                  // optional instanceof filter
  jvmtiHeapObjectFilter _object_filter;           // tagged/untagged filter
  jvmtiHeapObjectCallback _heap_object_callback;  // agent callback
  const void* _user_data;

  // accessors
  JvmtiTagMap* tag_map() const { return _tag_map; }
  jvmtiHeapObjectFilter object_filter() const { return _object_filter; }
  jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
  Klass* klass() const { return _klass; }
  const void* user_data() const { return _user_data; }

  // indicates if iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const { return _iteration_aborted; }
  void set_iteration_aborted(bool aborted) { _iteration_aborted = aborted; }

 public:
  IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
                               Klass* klass,
                               jvmtiHeapObjectFilter object_filter,
                               jvmtiHeapObjectCallback heap_object_callback,
                               const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _object_filter(object_filter),
    _heap_object_callback(heap_object_callback),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  void do_object(oop o);
};

// invoked for each object in the heap
void IterateOverHeapObjectClosure::do_object(oop o) {
  // check if iteration has been halted
  if (is_iteration_aborted()) return;

  // ignore any objects that aren't visible to profiler
  if (!ServiceUtil::visible_oop(o)) return;

  // instanceof check when filtering by klass
  if (klass() != NULL && !o->is_a(klass())) {
    return;
  }
  // prepare for the callback (wrapper resolves the object's and klass' tags)
  CallbackWrapper wrapper(tag_map(), o);

  // if the object is tagged and we're only interested in untagged objects
  // then don't invoke the callback. Similarly, if the object is untagged
  // and we're only interested in tagged objects we skip the callback.
  if (wrapper.obj_tag() != 0) {
    if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
  } else {
    if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
  }

  // invoke the agent's callback
  jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
                                                       wrapper.obj_size(),
                                                       wrapper.obj_tag_p(),
                                                       (void*)user_data());
  if (control == JVMTI_ITERATION_ABORT) {
    set_iteration_aborted(true);
  }
}

// An ObjectClosure used to support the IterateThroughHeap function
class IterateThroughHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;
  Klass* _klass;                          // optional exact-klass filter
  int _heap_filter;                       // JVMTI_HEAP_FILTER_* bit mask
  const jvmtiHeapCallbacks* _callbacks;
  const void* _user_data;

  // accessor functions
  JvmtiTagMap* tag_map() const { return _tag_map; }
  int heap_filter() const { return _heap_filter; }
  const jvmtiHeapCallbacks* callbacks() const { return _callbacks; }
  Klass* klass() const { return _klass; }
  const void* user_data() const { return _user_data; }

  // indicates if the iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const { return _iteration_aborted; }

  // used to check the visit control flags.
  // If the abort flag is set
  // then we set the iteration aborted flag so that the iteration completes
  // without processing any further objects
  bool check_flags_for_abort(jint flags) {
    bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
    if (is_abort) {
      _iteration_aborted = true;
    }
    return is_abort;
  }

 public:
  IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
                                  Klass* klass,
                                  int heap_filter,
                                  const jvmtiHeapCallbacks* heap_callbacks,
                                  const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _heap_filter(heap_filter),
    _callbacks(heap_callbacks),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  void do_object(oop o);
};

// invoked for each object in the heap; dispatches to the object, primitive
// field, string value, and primitive array callbacks (in that order)
void IterateThroughHeapObjectClosure::do_object(oop obj) {
  // check if iteration has been halted
  if (is_iteration_aborted()) return;

  // ignore any objects that aren't visible to profiler
  if (!ServiceUtil::visible_oop(obj)) return;

  // apply class filter
  if (is_filtered_by_klass_filter(obj, klass())) return;

  // prepare for callback
  CallbackWrapper wrapper(tag_map(), obj);

  // check if filtered by the heap filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
    return;
  }

  // for arrays we need the length, otherwise -1
  bool is_array = obj->is_array();
  int len = is_array ? arrayOop(obj)->length() : -1;

  // invoke the object callback (if callback is provided)
  if (callbacks()->heap_iteration_callback != NULL) {
    jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
    jint res = (*cb)(wrapper.klass_tag(),
                     wrapper.obj_size(),
                     wrapper.obj_tag_p(),
                     (jint)len,
                     (void*)user_data());
    if (check_flags_for_abort(res)) return;
  }

  // for objects and classes we report primitive fields if callback provided
  if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
    jint res;
    jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
    if (obj->klass() == SystemDictionary::Class_klass()) {
      // a java.lang.Class mirror: report that class's static fields
      res = invoke_primitive_field_callback_for_static_fields(&wrapper,
                                                              obj,
                                                              cb,
                                                              (void*)user_data());
    } else {
      res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
                                                                obj,
                                                                cb,
                                                                (void*)user_data());
    }
    if (check_flags_for_abort(res)) return;
  }

  // string callback
  if (!is_array &&
      callbacks()->string_primitive_value_callback != NULL &&
      obj->klass() == SystemDictionary::String_klass()) {
    jint res = invoke_string_value_callback(
                callbacks()->string_primitive_value_callback,
                &wrapper,
                obj,
                (void*)user_data() );
    if (check_flags_for_abort(res)) return;
  }

  // array callback
  if (is_array &&
      callbacks()->array_primitive_value_callback != NULL &&
      obj->is_typeArray()) {
    jint res = invoke_array_primitive_value_callback(
               callbacks()->array_primitive_value_callback,
               &wrapper,
               obj,
               (void*)user_data() );
    if (check_flags_for_abort(res)) return;
  }
};


// Deprecated function to iterate over all objects in the heap
void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
                                    Klass* klass,
                                    jvmtiHeapObjectCallback heap_object_callback,
const void* user_data) 1475 { 1476 MutexLocker ml(Heap_lock); 1477 IterateOverHeapObjectClosure blk(this, 1478 klass, 1479 object_filter, 1480 heap_object_callback, 1481 user_data); 1482 VM_HeapIterateOperation op(&blk); 1483 VMThread::execute(&op); 1484 } 1485 1486 1487 // Iterates over all objects in the heap 1488 void JvmtiTagMap::iterate_through_heap(jint heap_filter, 1489 Klass* klass, 1490 const jvmtiHeapCallbacks* callbacks, 1491 const void* user_data) 1492 { 1493 MutexLocker ml(Heap_lock); 1494 IterateThroughHeapObjectClosure blk(this, 1495 klass, 1496 heap_filter, 1497 callbacks, 1498 user_data); 1499 VM_HeapIterateOperation op(&blk); 1500 VMThread::execute(&op); 1501 } 1502 1503 // support class for get_objects_with_tags 1504 1505 class TagObjectCollector : public JvmtiTagHashmapEntryClosure { 1506 private: 1507 JvmtiEnv* _env; 1508 jlong* _tags; 1509 jint _tag_count; 1510 1511 GrowableArray<jobject>* _object_results; // collected objects (JNI weak refs) 1512 GrowableArray<uint64_t>* _tag_results; // collected tags 1513 1514 public: 1515 TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) { 1516 _env = env; 1517 _tags = (jlong*)tags; 1518 _tag_count = tag_count; 1519 _object_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<jobject>(1,true); 1520 _tag_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<uint64_t>(1,true); 1521 } 1522 1523 ~TagObjectCollector() { 1524 delete _object_results; 1525 delete _tag_results; 1526 } 1527 1528 // for each tagged object check if the tag value matches 1529 // - if it matches then we create a JNI local reference to the object 1530 // and record the reference and tag value. 
1531 // 1532 void do_entry(JvmtiTagHashmapEntry* entry) { 1533 for (int i=0; i<_tag_count; i++) { 1534 if (_tags[i] == entry->tag()) { 1535 oop o = entry->object(); 1536 assert(o != NULL && Universe::heap()->is_in_reserved(o), "sanity check"); 1537 #if INCLUDE_ALL_GCS 1538 // The reference in this tag map could be the only (implicitly weak) 1539 // reference to that object. If we hand it out, we need to keep it live wrt 1540 // SATB marking similar to other j.l.ref.Reference referents. 1541 oopDesc::bs()->keep_alive_barrier(o); 1542 #endif 1543 jobject ref = JNIHandles::make_local(JavaThread::current(), o); 1544 _object_results->append(ref); 1545 _tag_results->append((uint64_t)entry->tag()); 1546 } 1547 } 1548 } 1549 1550 // return the results from the collection 1551 // 1552 jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) { 1553 jvmtiError error; 1554 int count = _object_results->length(); 1555 assert(count >= 0, "sanity check"); 1556 1557 // if object_result_ptr is not NULL then allocate the result and copy 1558 // in the object references. 1559 if (object_result_ptr != NULL) { 1560 error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr); 1561 if (error != JVMTI_ERROR_NONE) { 1562 return error; 1563 } 1564 for (int i=0; i<count; i++) { 1565 (*object_result_ptr)[i] = _object_results->at(i); 1566 } 1567 } 1568 1569 // if tag_result_ptr is not NULL then allocate the result and copy 1570 // in the tag values. 
1571 if (tag_result_ptr != NULL) { 1572 error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr); 1573 if (error != JVMTI_ERROR_NONE) { 1574 if (object_result_ptr != NULL) { 1575 _env->Deallocate((unsigned char*)object_result_ptr); 1576 } 1577 return error; 1578 } 1579 for (int i=0; i<count; i++) { 1580 (*tag_result_ptr)[i] = (jlong)_tag_results->at(i); 1581 } 1582 } 1583 1584 *count_ptr = count; 1585 return JVMTI_ERROR_NONE; 1586 } 1587 }; 1588 1589 // return the list of objects with the specified tags 1590 jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags, 1591 jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) { 1592 1593 TagObjectCollector collector(env(), tags, count); 1594 { 1595 // iterate over all tagged objects 1596 MutexLocker ml(lock()); 1597 entry_iterate(&collector); 1598 } 1599 return collector.result(count_ptr, object_result_ptr, tag_result_ptr); 1600 } 1601 1602 1603 // ObjectMarker is used to support the marking objects when walking the 1604 // heap. 1605 // 1606 // This implementation uses the existing mark bits in an object for 1607 // marking. Objects that are marked must later have their headers restored. 1608 // As most objects are unlocked and don't have their identity hash computed 1609 // we don't have to save their headers. Instead we save the headers that 1610 // are "interesting". Later when the headers are restored this implementation 1611 // restores all headers to their initial value and then restores the few 1612 // objects that had interesting headers. 1613 // 1614 // Future work: This implementation currently uses growable arrays to save 1615 // the oop and header of interesting objects. As an optimization we could 1616 // use the same technique as the GC and make use of the unused area 1617 // between top() and end(). 
//

// An ObjectClosure used to restore the mark bits of an object
class RestoreMarksClosure : public ObjectClosure {
 public:
  void do_object(oop o) {
    if (o != NULL) {
      markOop mark = o->mark();
      if (mark->is_marked()) {
        // reset header to its default (prototype) value
        o->init_mark();
      }
    }
  }
};

// ObjectMarker provides the mark and visited functions
class ObjectMarker : AllStatic {
 private:
  // saved headers - only headers that can't be trivially reconstructed
  // (locked, hashed, ...) are pushed here by mark()
  static GrowableArray<oop>* _saved_oop_stack;
  static GrowableArray<markOop>* _saved_mark_stack;
  static bool _needs_reset;                  // do we need to reset mark bits?

 public:
  static void init();                       // initialize
  static void done();                       // clean-up

  static inline void mark(oop o);           // mark an object
  static inline bool visited(oop o);        // check if object has been visited

  static inline bool needs_reset()            { return _needs_reset; }
  static inline void set_needs_reset(bool v)  { _needs_reset = v; }
};

GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
bool ObjectMarker::_needs_reset = true;  // need to reset mark bits by default

// initialize ObjectMarker - prepares for object marking
void ObjectMarker::init() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");

  // prepare heap for iteration
  Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

  // create stacks for interesting headers
  _saved_mark_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<markOop>(4000, true);
  _saved_oop_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(4000, true);

  if (UseBiasedLocking) {
    // biased headers must be preserved before we overwrite mark words
    BiasedLocking::preserve_marks();
  }
}

// Object marking is done so restore object headers
void ObjectMarker::done() {
  // iterate over all objects and restore the mark bits to
  // their initial value
  RestoreMarksClosure blk;
  if (needs_reset()) {
    Universe::heap()->object_iterate(&blk);
  } else {
    // We don't need to reset mark bits on this call, but reset the
    // flag to the default for the next call.
    set_needs_reset(true);
  }

  // now restore the interesting headers - this must happen AFTER the
  // blanket reset above so the saved values are not clobbered
  for (int i = 0; i < _saved_oop_stack->length(); i++) {
    oop o = _saved_oop_stack->at(i);
    markOop mark = _saved_mark_stack->at(i);
    o->set_mark(mark);
  }

  if (UseBiasedLocking) {
    BiasedLocking::restore_marks();
  }

  // free the stacks
  delete _saved_oop_stack;
  delete _saved_mark_stack;
}

// mark an object
inline void ObjectMarker::mark(oop o) {
  assert(Universe::heap()->is_in(o), "sanity check");
  assert(!o->mark()->is_marked(), "should only mark an object once");

  // object's mark word
  markOop mark = o->mark();

  // save the header only when it carries state (lock/hash) that cannot
  // be reconstructed from the prototype later
  if (mark->must_be_preserved(o)) {
    _saved_mark_stack->push(mark);
    _saved_oop_stack->push(o);
  }

  // mark the object
  o->set_mark(markOopDesc::prototype()->set_marked());
}

// return true if object is marked
inline bool ObjectMarker::visited(oop o) {
  return o->mark()->is_marked();
}

// Stack allocated class to help ensure that ObjectMarker is used
// correctly. Constructor initializes ObjectMarker, destructor calls
// ObjectMarker's done() function to restore object headers.
class ObjectMarkerController : public StackObj {
 public:
  ObjectMarkerController() {
    ObjectMarker::init();
  }
  ~ObjectMarkerController() {
    // restores object headers even on early exit from the walk
    ObjectMarker::done();
  }
};


// helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
// (not performance critical as only used for roots)
static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
  switch (kind) {
    case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
    case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
    case JVMTI_HEAP_REFERENCE_MONITOR:      return JVMTI_HEAP_ROOT_MONITOR;
    case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
    case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
    case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
    case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
    default: ShouldNotReachHere(); return JVMTI_HEAP_ROOT_OTHER;
  }
}

// Base class for all heap walk contexts. The base class maintains a flag
// to indicate if the context is valid or not.
class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
 private:
  bool _valid;
 public:
  HeapWalkContext(bool valid) { _valid = valid; }
  void invalidate() { _valid = false; }
  bool is_valid() const { return _valid; }
};

// A basic heap walk context for the deprecated heap walking functions.
// The context for a basic heap walk are the callbacks and fields used by
// the referrer caching scheme.
class BasicHeapWalkContext: public HeapWalkContext {
 private:
  jvmtiHeapRootCallback _heap_root_callback;
  jvmtiStackReferenceCallback _stack_ref_callback;
  jvmtiObjectReferenceCallback _object_ref_callback;

  // used for caching
  oop _last_referrer;
  jlong _last_referrer_tag;

 public:
  // default constructor creates an invalid context (any use asserts)
  BasicHeapWalkContext() : HeapWalkContext(false) { }

  BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
                       jvmtiStackReferenceCallback stack_ref_callback,
                       jvmtiObjectReferenceCallback object_ref_callback) :
    HeapWalkContext(true),
    _heap_root_callback(heap_root_callback),
    _stack_ref_callback(stack_ref_callback),
    _object_ref_callback(object_ref_callback),
    _last_referrer(NULL),
    _last_referrer_tag(0) {
  }

  // accessors
  jvmtiHeapRootCallback heap_root_callback() const { return _heap_root_callback; }
  jvmtiStackReferenceCallback stack_ref_callback() const { return _stack_ref_callback; }
  jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback; }

  oop last_referrer() const { return _last_referrer; }
  void set_last_referrer(oop referrer) { _last_referrer = referrer; }
  jlong last_referrer_tag() const { return _last_referrer_tag; }
  void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
};

// The advanced heap walk context for the FollowReferences functions.
// The context is the callbacks, and the fields used for filtering.
class AdvancedHeapWalkContext: public HeapWalkContext {
 private:
  jint _heap_filter;                        // JVMTI_HEAP_FILTER_* bit mask
  Klass* _klass_filter;                     // optional exact-klass filter
  const jvmtiHeapCallbacks* _heap_callbacks;

 public:
  // default constructor creates an invalid context (any use asserts)
  AdvancedHeapWalkContext() : HeapWalkContext(false) { }

  AdvancedHeapWalkContext(jint heap_filter,
                          Klass* klass_filter,
                          const jvmtiHeapCallbacks* heap_callbacks) :
    HeapWalkContext(true),
    _heap_filter(heap_filter),
    _klass_filter(klass_filter),
    _heap_callbacks(heap_callbacks) {
  }

  // accessors
  jint heap_filter() const { return _heap_filter; }
  Klass* klass_filter() const { return _klass_filter; }

  const jvmtiHeapReferenceCallback heap_reference_callback() const {
    return _heap_callbacks->heap_reference_callback;
  };
  const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
    return _heap_callbacks->primitive_field_callback;
  }
  const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
    return _heap_callbacks->array_primitive_value_callback;
  }
  const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
    return _heap_callbacks->string_primitive_value_callback;
  }
};

// The CallbackInvoker is a class with static functions that the heap walk can call
// into to invoke callbacks. It works in one of two modes. The "basic" mode is
// used for the deprecated IterateOverReachableObjects functions. The "advanced"
// mode is for the newer FollowReferences function which supports a lot of
// additional callbacks.
class CallbackInvoker : AllStatic {
 private:
  // heap walk styles
  enum { basic, advanced };
  static int _heap_walk_type;
  static bool is_basic_heap_walk()    { return _heap_walk_type == basic; }
  static bool is_advanced_heap_walk() { return _heap_walk_type == advanced; }

  // context for basic style heap walk
  static BasicHeapWalkContext _basic_context;
  static BasicHeapWalkContext* basic_context() {
    assert(_basic_context.is_valid(), "invalid");
    return &_basic_context;
  }

  // context for advanced style heap walk
  static AdvancedHeapWalkContext _advanced_context;
  static AdvancedHeapWalkContext* advanced_context() {
    assert(_advanced_context.is_valid(), "invalid");
    return &_advanced_context;
  }

  // context needed for all heap walks
  static JvmtiTagMap* _tag_map;
  static const void* _user_data;
  static GrowableArray<oop>* _visit_stack;   // objects still to be visited

  // accessors
  static JvmtiTagMap* tag_map()            { return _tag_map; }
  static const void* user_data()           { return _user_data; }
  static GrowableArray<oop>* visit_stack() { return _visit_stack; }

  // if the object hasn't been visited then push it onto the visit stack
  // so that it will be visited later
  static inline bool check_for_visit(oop obj) {
    if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
    return true;
  }

  // invoke basic style callbacks
  static inline bool invoke_basic_heap_root_callback
    (jvmtiHeapRootKind root_kind, oop obj);
  static inline bool invoke_basic_stack_ref_callback
    (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
     int slot, oop obj);
  static inline bool invoke_basic_object_reference_callback
    (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // invoke advanced style callbacks
  static inline bool invoke_advanced_heap_root_callback
    (jvmtiHeapReferenceKind ref_kind, oop obj);
  static inline bool invoke_advanced_stack_ref_callback
    (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
     jmethodID method, jlocation bci, jint slot, oop obj);
  static inline bool invoke_advanced_object_reference_callback
    (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // used to report the value of primitive fields
  static inline bool report_primitive_field
    (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);

 public:
  // initialize for basic mode
  static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                             GrowableArray<oop>* visit_stack,
                                             const void* user_data,
                                             BasicHeapWalkContext context);

  // initialize for advanced mode
  static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                GrowableArray<oop>* visit_stack,
                                                const void* user_data,
                                                AdvancedHeapWalkContext context);

  // functions to report roots
  static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
  static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID m, oop o);
  static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID method, jlocation bci, jint slot, oop o);

  // functions to report references
  static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
  static inline bool report_class_reference(oop referrer, oop referree);
  static inline bool report_class_loader_reference(oop referrer, oop referree);
  static inline bool report_signers_reference(oop referrer, oop referree);
  static inline bool report_protection_domain_reference(oop referrer, oop referree);
  static inline bool report_superclass_reference(oop referrer, oop referree);
  static inline bool report_interface_reference(oop referrer, oop referree);
  static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
  static inline bool report_primitive_array_values(oop array);
  static inline bool report_string_value(oop str);
  static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
  static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
};

// statics
int CallbackInvoker::_heap_walk_type;
BasicHeapWalkContext CallbackInvoker::_basic_context;
AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
JvmtiTagMap* CallbackInvoker::_tag_map;
const void* CallbackInvoker::_user_data;
GrowableArray<oop>* CallbackInvoker::_visit_stack;

// initialize for basic heap walk (IterateOverReachableObjects et al)
void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                                     GrowableArray<oop>* visit_stack,
                                                     const void* user_data,
                                                     BasicHeapWalkContext context) {
  _tag_map = tag_map;
  _visit_stack = visit_stack;
  _user_data = user_data;
  _basic_context = context;
  _advanced_context.invalidate();  // will trigger assertion if used
  _heap_walk_type = basic;
}

// initialize for advanced heap walk (FollowReferences)
void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                        GrowableArray<oop>* visit_stack,
                                                        const void* user_data,
                                                        AdvancedHeapWalkContext context) {
  _tag_map = tag_map;
  _visit_stack = visit_stack;
  _user_data = user_data;
  _advanced_context = context;
  _basic_context.invalidate();  // will trigger assertion if used
  _heap_walk_type = advanced;
}


// invoke basic style heap root callback
inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  // check if heap roots should be reported; with no callback the object
  // is still (possibly) pushed for later visiting
  jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  // returns false (stop the walk) only on JVMTI_ITERATION_ABORT
  return control != JVMTI_ITERATION_ABORT;
}

// invoke basic style stack ref callback
inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
                                                             jlong thread_tag,
                                                             jint depth,
                                                             jmethodID method,
                                                             jint slot,
                                                             oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  // check if stack references should be reported
  jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        thread_tag,
                                        depth,
                                        method,
                                        slot,
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}

// invoke basic style object reference callback
inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
                                                                    oop referrer,
                                                                    oop referree,
                                                                    jint index) {

  assert(ServiceUtil::visible_oop(referrer), "checking");
  assert(ServiceUtil::visible_oop(referree), "checking");

  BasicHeapWalkContext* context = basic_context();

  // callback requires the referrer's tag. If it's the same referrer
  // as the last call then we use the cached value.
  jlong referrer_tag;
  if (referrer == context->last_referrer()) {
    referrer_tag = context->last_referrer_tag();
  } else {
    referrer_tag = tag_for(tag_map(), referrer);
  }

  // do the callback
  CallbackWrapper wrapper(tag_map(), referree);
  jvmtiObjectReferenceCallback cb = context->object_ref_callback();
  jvmtiIterationControl control = (*cb)(ref_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        referrer_tag,
                                        index,
                                        (void*)user_data());

  // record referrer and referrer tag. For self-references record the
  // tag value from the callback as this might differ from referrer_tag.
  context->set_last_referrer(referrer);
  if (referrer == referree) {
    context->set_last_referrer_tag(*wrapper.obj_tag_p());
  } else {
    context->set_last_referrer_tag(referrer_tag);
  }

  if (control == JVMTI_ITERATION_CONTINUE) {
    return check_for_visit(referree);
  } else {
    return control != JVMTI_ITERATION_ABORT;
  }
}

// invoke advanced style heap root callback
inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
                                                                oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback
  jint res = (*cb)(ref_kind,
                   NULL,                      // referrer info
                   wrapper.klass_tag(),
                   0,                         // referrer_class_tag is 0 for heap root
                   wrapper.obj_size(),
                   wrapper.obj_tag_p(),
                   NULL,                      // referrer_tag_p
                   len,
                   (void*)user_data());
  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}

// report a reference from a thread stack to an object
inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
                                                                jlong thread_tag,
                                                                jlong tid,
                                                                int depth,
                                                                jmethodID method,
                                                                jlocation bci,
                                                                jint slot,
                                                                oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // setup the referrer info
  jvmtiHeapReferenceInfo reference_info;
  reference_info.stack_local.thread_tag = thread_tag;
  reference_info.stack_local.thread_id = tid;
  reference_info.stack_local.depth = depth;
  reference_info.stack_local.method = method;
  reference_info.stack_local.location = bci;
  reference_info.stack_local.slot = slot;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // call into the agent
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  0,                          // referrer_class_tag is 0 for heap root (stack)
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  NULL,                       // referrer_tag is 0 for root
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}

// This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
// only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
#define REF_INFO_MASK ((1 << JVMTI_HEAP_REFERENCE_FIELD)         \
                     | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)  \
                     | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
                     | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
                     | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)   \
                     | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))

// invoke the object reference callback to report a reference
inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
                                                                       oop referrer,
                                                                       oop obj,
                                                                       jint index)
{
  // field index is only valid field in reference_info
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  assert(ServiceUtil::visible_oop(referrer), "checking");
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // field index is only valid field in reference_info
  reference_info.field.index = index;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback; reference_info is only passed for the ref_kinds
  // covered by REF_INFO_MASK, as required by the JVM TI spec
  int res = (*cb)(ref_kind,
                  (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
                  wrapper.klass_tag(),
                  wrapper.referrer_klass_tag(),
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  wrapper.referrer_tag_p(),
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}

// report a "simple root"
inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
  assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
         kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
  assert(ServiceUtil::visible_oop(obj), "checking");

  if (is_basic_heap_walk()) {
    // map to old style root kind
    jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
    return invoke_basic_heap_root_callback(root_kind, obj);
  } else {
    assert(is_advanced_heap_walk(), "wrong heap walk type");
    return invoke_advanced_heap_root_callback(kind, obj);
  }
}


// report the values of a primitive array
inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
  assert(obj->is_typeArray(), "not a primitive array");

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->array_primitive_value_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // invoke the callback
  int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
                                                  &wrapper,
                                                  obj,
                                                  (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}

// invoke the string value callback
inline bool CallbackInvoker::report_string_value(oop str) {
  assert(str->klass() == SystemDictionary::String_klass(), "not a string");

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->string_primitive_value_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(str, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), str);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // invoke the callback
  int res = invoke_string_value_callback(context->string_primitive_value_callback(),
                                         &wrapper,
                                         str,
                                         (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}

// invoke the primitive field callback
inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
                                                    oop obj,
                                                    jint index,
                                                    address addr,
                                                    char type)
{
  // for primitive fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->primitive_field_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // the field index in the referrer
  reference_info.field.index = index;

  // map the type
  jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

  // setup the jvalue
  jvalue value;
  copy_to_jvalue(&value, addr, value_type);

  jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  wrapper.obj_tag_p(),
                  value,
                  value_type,
                  (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}


// report the value of a primitive instance field
inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
                                                             jint index,
                                                             address value,
                                                             char type) {
  return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
                                obj,
                                index,
                                value,
                                type);
}

// report the value of a primitive static field
inline bool CallbackInvoker::report_primitive_static_field(oop obj,
                                                           jint index,
                                                           address value,
                                                           char type) {
  return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                obj,
                                index,
                                value,
                                type);
}

// report a JNI local (root object) to the profiler
inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
  if (is_basic_heap_walk()) {
    // basic interface has no thread id, bci, or slot for JNI locals
    return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
                                           thread_tag,
                                           depth,
                                           m,
                                           -1,
                                           obj);
  } else {
    return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
                                              thread_tag, tid,
                                              depth,
                                              m,
                                              (jlocation)-1,
                                              -1,
                                              obj);
  }
}


// report a local (stack reference, root object)
inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
                                                   jlong tid,
                                                   jint depth,
                                                   jmethodID method,
                                                   jlocation bci,
                                                   jint slot,
                                                   oop obj) {
  if (is_basic_heap_walk()) {
    return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
                                           thread_tag,
                                           depth,
                                           method,
                                           slot,
                                           obj);
  } else {
    return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
                                              thread_tag,
                                              tid,
                                              depth,
                                              method,
                                              bci,
                                              slot,
                                              obj);
  }
}

// report an object referencing a class.
inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
  }
}

// report a class referencing its class loader.
inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
  }
}

// report a class referencing its signers.
inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
  }
}

// report a class referencing its protection domain.
2491 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) { 2492 if (is_basic_heap_walk()) { 2493 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1); 2494 } else { 2495 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1); 2496 } 2497 } 2498 2499 // report a class referencing its superclass. 2500 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) { 2501 if (is_basic_heap_walk()) { 2502 // Send this to be consistent with past implementation 2503 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1); 2504 } else { 2505 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1); 2506 } 2507 } 2508 2509 // report a class referencing one of its interfaces. 2510 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) { 2511 if (is_basic_heap_walk()) { 2512 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1); 2513 } else { 2514 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1); 2515 } 2516 } 2517 2518 // report a class referencing one of its static fields. 
inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
  }
}

// report an array referencing an element object
inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
  }
}

// report an object referencing an instance field object
inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
  }
}

// report a class referencing one of its constant pool entries
inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
  }
}

// A supporting closure used to process simple roots
class SimpleRootsClosure : public OopClosure {
 private:
  jvmtiHeapReferenceKind _kind;   // root kind for subsequent do_oop calls
  bool _continue;                 // cleared when a callback aborts the walk

  jvmtiHeapReferenceKind root_kind()    { return _kind; }

 public:
  // set the root kind for the oops that follow; re-arms the closure
  void set_kind(jvmtiHeapReferenceKind kind) {
    _kind = kind;
    _continue = true;
  }

  // true once a callback has requested the iteration to stop
  inline bool stopped() {
    return !_continue;
  }

  void do_oop(oop* obj_p) {
    // iteration has terminated
    if (stopped()) {
      return;
    }

    // ignore null or deleted handles
    oop o = *obj_p;
    if (o == NULL || oopDesc::equals(o, JNIHandles::deleted_handle())) {
      return;
    }

    assert(Universe::heap()->is_in_reserved(o), "should be impossible");

    jvmtiHeapReferenceKind kind = root_kind();
    if (kind == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
      // SystemDictionary::always_strong_oops_do reports the application
      // class loader as a root. We want this root to be reported as
      // a root kind of "OTHER" rather than "SYSTEM_CLASS".
      if (!o->is_instance() || !InstanceKlass::cast(o->klass())->is_mirror_instance_klass()) {
        kind = JVMTI_HEAP_REFERENCE_OTHER;
      }
    }

    // some objects are ignored - in the case of simple
    // roots it's mostly Symbol*s that we are skipping
    // here.
    if (!ServiceUtil::visible_oop(o)) {
      return;
    }

    // invoke the callback
    _continue = CallbackInvoker::report_simple_root(kind, o);

  }
  virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
};

// A supporting closure used to process JNI locals
class JNILocalRootsClosure : public OopClosure {
 private:
  jlong _thread_tag;    // tag of the owning thread
  jlong _tid;           // thread id
  jint _depth;          // frame depth
  jmethodID _method;    // method of the frame holding the local
  bool _continue;       // cleared when a callback aborts the walk

  public:
  // set the thread/frame context for the locals that follow; re-arms the closure
  void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
    _thread_tag = thread_tag;
    _tid = tid;
    _depth = depth;
    _method = method;
    _continue = true;
  }

  // true once a callback has requested the iteration to stop
  inline bool stopped() {
    return !_continue;
  }

  void do_oop(oop* obj_p) {
    // iteration has terminated
    if (stopped()) {
      return;
    }

    // ignore null or deleted handles
    oop o = *obj_p;
    if (o == NULL || oopDesc::equals(o, JNIHandles::deleted_handle())) {
      return;
    }

    if (!ServiceUtil::visible_oop(o)) {
      return;
    }

    // invoke the callback
    _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
  }
  virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
};


// A VM operation to iterate over objects that are reachable from
// a set of roots or an initial object.
2656 // 2657 // For VM_HeapWalkOperation the set of roots used is :- 2658 // 2659 // - All JNI global references 2660 // - All inflated monitors 2661 // - All classes loaded by the boot class loader (or all classes 2662 // in the event that class unloading is disabled) 2663 // - All java threads 2664 // - For each java thread then all locals and JNI local references 2665 // on the thread's execution stack 2666 // - All visible/explainable objects from Universes::oops_do 2667 // 2668 class VM_HeapWalkOperation: public VM_Operation { 2669 private: 2670 enum { 2671 initial_visit_stack_size = 4000 2672 }; 2673 2674 bool _is_advanced_heap_walk; // indicates FollowReferences 2675 JvmtiTagMap* _tag_map; 2676 Handle _initial_object; 2677 GrowableArray<oop>* _visit_stack; // the visit stack 2678 2679 bool _collecting_heap_roots; // are we collecting roots 2680 bool _following_object_refs; // are we following object references 2681 2682 bool _reporting_primitive_fields; // optional reporting 2683 bool _reporting_primitive_array_values; 2684 bool _reporting_string_values; 2685 2686 GrowableArray<oop>* create_visit_stack() { 2687 return new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(initial_visit_stack_size, true); 2688 } 2689 2690 // accessors 2691 bool is_advanced_heap_walk() const { return _is_advanced_heap_walk; } 2692 JvmtiTagMap* tag_map() const { return _tag_map; } 2693 Handle initial_object() const { return _initial_object; } 2694 2695 bool is_following_references() const { return _following_object_refs; } 2696 2697 bool is_reporting_primitive_fields() const { return _reporting_primitive_fields; } 2698 bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; } 2699 bool is_reporting_string_values() const { return _reporting_string_values; } 2700 2701 GrowableArray<oop>* visit_stack() const { return _visit_stack; } 2702 2703 // iterate over the various object types 2704 inline bool iterate_over_array(oop o); 2705 inline bool 
iterate_over_type_array(oop o); 2706 inline bool iterate_over_class(oop o); 2707 inline bool iterate_over_object(oop o); 2708 2709 // root collection 2710 inline bool collect_simple_roots(); 2711 inline bool collect_stack_roots(); 2712 inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk); 2713 2714 // visit an object 2715 inline bool visit(oop o); 2716 2717 public: 2718 VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2719 Handle initial_object, 2720 BasicHeapWalkContext callbacks, 2721 const void* user_data); 2722 2723 VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2724 Handle initial_object, 2725 AdvancedHeapWalkContext callbacks, 2726 const void* user_data); 2727 2728 ~VM_HeapWalkOperation(); 2729 2730 VMOp_Type type() const { return VMOp_HeapWalkOperation; } 2731 void doit(); 2732 }; 2733 2734 2735 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2736 Handle initial_object, 2737 BasicHeapWalkContext callbacks, 2738 const void* user_data) { 2739 _is_advanced_heap_walk = false; 2740 _tag_map = tag_map; 2741 _initial_object = initial_object; 2742 _following_object_refs = (callbacks.object_ref_callback() != NULL); 2743 _reporting_primitive_fields = false; 2744 _reporting_primitive_array_values = false; 2745 _reporting_string_values = false; 2746 _visit_stack = create_visit_stack(); 2747 2748 2749 CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks); 2750 } 2751 2752 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2753 Handle initial_object, 2754 AdvancedHeapWalkContext callbacks, 2755 const void* user_data) { 2756 _is_advanced_heap_walk = true; 2757 _tag_map = tag_map; 2758 _initial_object = initial_object; 2759 _following_object_refs = true; 2760 _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);; 2761 _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);; 2762 _reporting_string_values = 
(callbacks.string_primitive_value_callback() != NULL);; 2763 _visit_stack = create_visit_stack(); 2764 2765 CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks); 2766 } 2767 2768 VM_HeapWalkOperation::~VM_HeapWalkOperation() { 2769 if (_following_object_refs) { 2770 assert(_visit_stack != NULL, "checking"); 2771 delete _visit_stack; 2772 _visit_stack = NULL; 2773 } 2774 } 2775 2776 // an array references its class and has a reference to 2777 // each element in the array 2778 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) { 2779 objArrayOop array = objArrayOop(o); 2780 2781 // array reference to its class 2782 oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror(); 2783 if (!CallbackInvoker::report_class_reference(o, mirror)) { 2784 return false; 2785 } 2786 2787 // iterate over the array and report each reference to a 2788 // non-null element 2789 for (int index=0; index<array->length(); index++) { 2790 oop elem = array->obj_at(index); 2791 if (elem == NULL) { 2792 continue; 2793 } 2794 2795 // report the array reference o[index] = elem 2796 if (!CallbackInvoker::report_array_element_reference(o, elem, index)) { 2797 return false; 2798 } 2799 } 2800 return true; 2801 } 2802 2803 // a type array references its class 2804 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) { 2805 Klass* k = o->klass(); 2806 oop mirror = k->java_mirror(); 2807 if (!CallbackInvoker::report_class_reference(o, mirror)) { 2808 return false; 2809 } 2810 2811 // report the array contents if required 2812 if (is_reporting_primitive_array_values()) { 2813 if (!CallbackInvoker::report_primitive_array_values(o)) { 2814 return false; 2815 } 2816 } 2817 return true; 2818 } 2819 2820 #ifdef ASSERT 2821 // verify that a static oop field is in range 2822 static inline bool verify_static_oop(InstanceKlass* ik, 2823 oop mirror, int offset) { 2824 address obj_p = (address)mirror + offset; 2825 address start = 
(address)InstanceMirrorKlass::start_of_static_fields(mirror);
  // Tail of the debug-only verify_static_oop() helper: 'start' (assigned on
  // the line above, whose head lies outside this chunk) is the address of the
  // first static field in the java.lang.Class mirror; 'end' is one past the
  // last static oop field.
  address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
  assert(end >= start, "sanity check");

  // The candidate field address is a valid static oop field only if it lies
  // inside the mirror's static-oop-field range [start, end).
  if (obj_p >= start && obj_p < end) {
    return true;
  } else {
    return false;
  }
}
#endif // #ifdef ASSERT

// a class references its super class, interfaces, class loader, ...
// and finally its static fields
//
// Reports each outgoing reference of the class represented by the
// java.lang.Class oop 'java_class' via the CallbackInvoker. Returns false as
// soon as a callback asks to terminate the iteration, true otherwise.
inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
  // loop index shared by the interface and static-field loops below
  // (the constant-pool loop declares its own shadowing 'i')
  int i;
  Klass* klass = java_lang_Class::as_Klass(java_class);

  // Only instance classes are walked here; other klasses (e.g. array
  // classes) fall through and report nothing.
  if (klass->is_instance_klass()) {
    InstanceKlass* ik = InstanceKlass::cast(klass);

    // Ignore the class if it hasn't been initialized yet
    if (!ik->is_linked()) {
      return true;
    }

    // get the java mirror
    oop mirror = klass->java_mirror();

    // super (only if something more interesting than java.lang.Object)
    Klass* java_super = ik->java_super();
    if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
      oop super = java_super->java_mirror();
      if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
        return false;
      }
    }

    // class loader
    oop cl = ik->class_loader();
    if (cl != NULL) {
      if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
        return false;
      }
    }

    // protection domain
    oop pd = ik->protection_domain();
    if (pd != NULL) {
      if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
        return false;
      }
    }

    // signers
    oop signers = ik->signers();
    if (signers != NULL) {
      if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
        return false;
      }
    }

    // references from the constant pool
    {
      ConstantPool* pool = ik->constants();
      // slot 0 of a constant pool is unused, so start at 1
      for (int i = 1; i < pool->length(); i++) {
        constantTag tag = pool->tag_at(i).value();
        // only String and Klass entries carry heap references
        if (tag.is_string() || tag.is_klass()) {
          oop entry;
          if (tag.is_string()) {
            entry = pool->resolved_string_at(i);
            // If the entry is non-null it is resolved.
            if (entry == NULL) continue;
          } else {
            entry = pool->resolved_klass_at(i)->java_mirror();
          }
          if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
            return false;
          }
        }
      }
    }

    // interfaces
    // (These will already have been reported as references from the constant pool
    //  but are specified by IterateOverReachableObjects and must be reported).
    Array<Klass*>* interfaces = ik->local_interfaces();
    for (i = 0; i < interfaces->length(); i++) {
      oop interf = ((Klass*)interfaces->at(i))->java_mirror();
      if (interf == NULL) {
        continue;
      }
      if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
        return false;
      }
    }

    // iterate over the static fields

    ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
    for (i=0; i<field_map->field_count(); i++) {
      ClassFieldDescriptor* field = field_map->field_at(i);
      char type = field->field_type();
      if (!is_primitive_field_type(type)) {
        // oop-typed static field: the value lives in the mirror
        oop fld_o = mirror->obj_field(field->field_offset());
        assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
        if (fld_o != NULL) {
          int slot = field->field_index();
          if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
            // callback requested termination; free the map before returning
            delete field_map;
            return false;
          }
        }
      } else {
        // primitive static field: reported by address inside the mirror,
        // but only if the agent asked for primitive field values
        if (is_reporting_primitive_fields()) {
          address addr = (address)mirror + field->field_offset();
          int slot = field->field_index();
          if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
            delete field_map;
            return false;
          }
        }
      }
    }
    delete field_map;

    return true;
  }

  return true;
}

// an object references a class and its instance fields
// (static fields are ignored here as we report these as
// references from the class).
//
// Returns false when a callback asks to terminate the iteration.
inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
  // reference to the class
  if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
    return false;
  }

  // iterate over instance fields
  ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      oop fld_o = o->obj_field(field->field_offset());
      // ignore any objects that aren't visible to profiler
      if (fld_o != NULL && ServiceUtil::visible_oop(fld_o)) {
        assert(Universe::heap()->is_in_reserved(fld_o), "unsafe code should not "
               "have references to Klass* anymore");
        int slot = field->field_index();
        if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
          return false;
        }
      }
    } else {
      if (is_reporting_primitive_fields()) {
        // primitive instance field
        address addr = (address)o + field->field_offset();
        int slot = field->field_index();
        if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
          return false;
        }
      }
    }
  }

  // if the object is a java.lang.String
  if (is_reporting_string_values() &&
      o->klass() == SystemDictionary::String_klass()) {
    if (!CallbackInvoker::report_string_value(o)) {
      return false;
    }
  }
  return true;
}


// Collects all simple (non-stack) roots except for threads;
// threads are handled in collect_stack_roots() as an optimization.
// if there's a heap root callback provided then the callback is
// invoked for each simple root.
// if an object reference callback is provided then all simple
// roots are pushed onto the marking stack so that they can be
// processed later
//
inline bool VM_HeapWalkOperation::collect_simple_roots() {
  SimpleRootsClosure blk;

  // JNI globals
  blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
  JNIHandles::oops_do(&blk);
  if (blk.stopped()) {
    return false;
  }

  // Preloaded classes and loader from the system dictionary
  blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
  SystemDictionary::always_strong_oops_do(&blk);
  KlassToOopClosure klass_blk(&blk);
  ClassLoaderDataGraph::always_strong_oops_do(&blk, &klass_blk, false);
  if (blk.stopped()) {
    return false;
  }

  // Inflated monitors
  blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
  ObjectSynchronizer::oops_do(&blk);
  if (blk.stopped()) {
    return false;
  }

  // threads are now handled in collect_stack_roots()

  // Other kinds of roots maintained by HotSpot
  // Many of these won't be visible but others (such as instances of important
  // exceptions) will be visible.
  blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
  Universe::oops_do(&blk);

  // If there are any non-perm roots in the code cache, visit them.
  // NOTE(review): no stopped() check after Universe::oops_do() before the
  // code-cache scan — appears intentional since true is returned regardless;
  // confirm against the closure's stop semantics.
  blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
  CodeBlobToOopClosure look_in_blobs(&blk, !CodeBlobToOopClosure::FixRelocations);
  CodeCache::scavenge_root_nmethods_do(&look_in_blobs);

  return true;
}

// Walk the stack of a given thread and find all references (locals
// and JNI calls) and report these as stack references
//
// Returns false when a callback asks to terminate the iteration.
inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
                                                      JNILocalRootsClosure* blk)
{
  oop threadObj = java_thread->threadObj();
  assert(threadObj != NULL, "sanity check");

  // only need to get the thread's tag once per thread
  jlong thread_tag = tag_for(_tag_map, threadObj);

  // also need the thread id
  jlong tid = java_lang_Thread::thread_id(threadObj);


  if (java_thread->has_last_Java_frame()) {

    // vframes are resource allocated
    Thread* current_thread = Thread::current();
    ResourceMark rm(current_thread);
    HandleMark hm(current_thread);

    RegisterMap reg_map(java_thread);
    frame f = java_thread->last_frame();
    vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);

    bool is_top_frame = true;
    int depth = 0;
    frame* last_entry_frame = NULL;

    while (vf != NULL) {
      if (vf->is_java_frame()) {

        // java frame (interpreted, compiled, ...)
        javaVFrame *jvf = javaVFrame::cast(vf);

        // the jmethodID
        jmethodID method = jvf->method()->jmethod_id();

        if (!(jvf->method()->is_native())) {
          jlocation bci = (jlocation)jvf->bci();
          // report each oop-typed local variable slot
          StackValueCollection* locals = jvf->locals();
          for (int slot=0; slot<locals->size(); slot++) {
            if (locals->at(slot)->type() == T_OBJECT) {
              oop o = locals->obj_at(slot)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                          bci, slot, o)) {
                return false;
              }
            }
          }

          // report each oop on the expression stack; slots are numbered
          // after the locals (locals->size() + index)
          StackValueCollection* exprs = jvf->expressions();
          for (int index=0; index < exprs->size(); index++) {
            if (exprs->at(index)->type() == T_OBJECT) {
              oop o = exprs->obj_at(index)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                          bci, locals->size() + index, o)) {
                return false;
              }
            }
          }

          // Follow oops from compiled nmethod
          if (jvf->cb() != NULL && jvf->cb()->is_nmethod()) {
            blk->set_context(thread_tag, tid, depth, method);
            jvf->cb()->as_nmethod()->oops_do(blk);
          }
        } else {
          // native frame: report its JNI local references
          blk->set_context(thread_tag, tid, depth, method);
          if (is_top_frame) {
            // JNI locals for the top frame.
            java_thread->active_handles()->oops_do(blk);
          } else {
            if (last_entry_frame != NULL) {
              // JNI locals for the entry frame
              assert(last_entry_frame->is_entry_frame(), "checking");
              last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
            }
          }
        }
        last_entry_frame = NULL;
        depth++;
      } else {
        // externalVFrame - for an entry frame then we report the JNI locals
        // when we find the corresponding javaVFrame
        frame* fr = vf->frame_pointer();
        assert(fr != NULL, "sanity check");
        if (fr->is_entry_frame()) {
          last_entry_frame = fr;
        }
      }

      vf = vf->sender();
      is_top_frame = false;
    }
  } else {
    // no last java frame but there may be JNI locals
    blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
    java_thread->active_handles()->oops_do(blk);
  }
  return true;
}


// Collects the simple roots for all threads and collects all
// stack roots - for each thread it walks the execution
// stack to find all references and local JNI refs.
inline bool VM_HeapWalkOperation::collect_stack_roots() {
  JNILocalRootsClosure blk;
  for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
    oop threadObj = thread->threadObj();
    // skip threads with no Java-level thread object, exiting threads, and
    // VM-internal threads hidden from the external view
    if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
      // Collect the simple root for this thread before we
      // collect its stack roots
      if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
                                              threadObj)) {
        return false;
      }
      if (!collect_stack_roots(thread, &blk)) {
        return false;
      }
    }
  }
  return true;
}

// visit an object
// first mark the object as visited
// second get all the outbound references from this object (in other words, all
// the objects referenced by this object).
3196 // 3197 bool VM_HeapWalkOperation::visit(oop o) { 3198 // mark object as visited 3199 assert(!ObjectMarker::visited(o), "can't visit same object more than once"); 3200 ObjectMarker::mark(o); 3201 3202 // instance 3203 if (o->is_instance()) { 3204 if (o->klass() == SystemDictionary::Class_klass()) { 3205 if (!java_lang_Class::is_primitive(o)) { 3206 // a java.lang.Class 3207 return iterate_over_class(o); 3208 } 3209 } else { 3210 return iterate_over_object(o); 3211 } 3212 } 3213 3214 // object array 3215 if (o->is_objArray()) { 3216 return iterate_over_array(o); 3217 } 3218 3219 // type array 3220 if (o->is_typeArray()) { 3221 return iterate_over_type_array(o); 3222 } 3223 3224 return true; 3225 } 3226 3227 void VM_HeapWalkOperation::doit() { 3228 ResourceMark rm; 3229 ObjectMarkerController marker; 3230 ClassFieldMapCacheMark cm; 3231 3232 assert(visit_stack()->is_empty(), "visit stack must be empty"); 3233 3234 // the heap walk starts with an initial object or the heap roots 3235 if (initial_object().is_null()) { 3236 // If either collect_stack_roots() or collect_simple_roots() 3237 // returns false at this point, then there are no mark bits 3238 // to reset. 3239 ObjectMarker::set_needs_reset(false); 3240 3241 // Calling collect_stack_roots() before collect_simple_roots() 3242 // can result in a big performance boost for an agent that is 3243 // focused on analyzing references in the thread stacks. 3244 if (!collect_stack_roots()) return; 3245 3246 if (!collect_simple_roots()) return; 3247 3248 // no early return so enable heap traversal to reset the mark bits 3249 ObjectMarker::set_needs_reset(true); 3250 } else { 3251 visit_stack()->push(initial_object()()); 3252 } 3253 3254 // object references required 3255 if (is_following_references()) { 3256 3257 // visit each object until all reachable objects have been 3258 // visited or the callback asked to terminate the iteration. 
3259 while (!visit_stack()->is_empty()) { 3260 oop o = visit_stack()->pop(); 3261 if (!ObjectMarker::visited(o)) { 3262 if (!visit(o)) { 3263 break; 3264 } 3265 } 3266 } 3267 } 3268 } 3269 3270 // iterate over all objects that are reachable from a set of roots 3271 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback, 3272 jvmtiStackReferenceCallback stack_ref_callback, 3273 jvmtiObjectReferenceCallback object_ref_callback, 3274 const void* user_data) { 3275 MutexLocker ml(Heap_lock); 3276 BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback); 3277 VM_HeapWalkOperation op(this, Handle(), context, user_data); 3278 VMThread::execute(&op); 3279 } 3280 3281 // iterate over all objects that are reachable from a given object 3282 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object, 3283 jvmtiObjectReferenceCallback object_ref_callback, 3284 const void* user_data) { 3285 oop obj = JNIHandles::resolve(object); 3286 Handle initial_object(Thread::current(), obj); 3287 3288 MutexLocker ml(Heap_lock); 3289 BasicHeapWalkContext context(NULL, NULL, object_ref_callback); 3290 VM_HeapWalkOperation op(this, initial_object, context, user_data); 3291 VMThread::execute(&op); 3292 } 3293 3294 // follow references from an initial object or the GC roots 3295 void JvmtiTagMap::follow_references(jint heap_filter, 3296 Klass* klass, 3297 jobject object, 3298 const jvmtiHeapCallbacks* callbacks, 3299 const void* user_data) 3300 { 3301 oop obj = JNIHandles::resolve(object); 3302 Handle initial_object(Thread::current(), obj); 3303 3304 MutexLocker ml(Heap_lock); 3305 AdvancedHeapWalkContext context(heap_filter, klass, callbacks); 3306 VM_HeapWalkOperation op(this, initial_object, context, user_data); 3307 VMThread::execute(&op); 3308 } 3309 3310 3311 void JvmtiTagMap::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) { 3312 // No locks during VM bring-up (0 threads) and no safepoints after 
main 3313 // thread creation and before VMThread creation (1 thread); initial GC 3314 // verification can happen in that window which gets to here. 3315 assert(Threads::number_of_threads() <= 1 || 3316 SafepointSynchronize::is_at_safepoint(), 3317 "must be executed at a safepoint"); 3318 if (JvmtiEnv::environments_might_exist()) { 3319 JvmtiEnvIterator it; 3320 for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) { 3321 JvmtiTagMap* tag_map = env->tag_map(); 3322 if (tag_map != NULL && !tag_map->is_empty()) { 3323 tag_map->do_weak_oops(is_alive, f); 3324 } 3325 } 3326 } 3327 } 3328 3329 void JvmtiTagMap::do_weak_oops(BoolObjectClosure* is_alive, OopClosure* f) { 3330 3331 // does this environment have the OBJECT_FREE event enabled 3332 bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE); 3333 3334 // counters used for trace message 3335 int freed = 0; 3336 int moved = 0; 3337 3338 JvmtiTagHashmap* hashmap = this->hashmap(); 3339 3340 // reenable sizing (if disabled) 3341 hashmap->set_resizing_enabled(true); 3342 3343 // if the hashmap is empty then we can skip it 3344 if (hashmap->_entry_count == 0) { 3345 return; 3346 } 3347 3348 // now iterate through each entry in the table 3349 3350 JvmtiTagHashmapEntry** table = hashmap->table(); 3351 int size = hashmap->size(); 3352 3353 JvmtiTagHashmapEntry* delayed_add = NULL; 3354 3355 for (int pos = 0; pos < size; ++pos) { 3356 JvmtiTagHashmapEntry* entry = table[pos]; 3357 JvmtiTagHashmapEntry* prev = NULL; 3358 3359 while (entry != NULL) { 3360 JvmtiTagHashmapEntry* next = entry->next(); 3361 3362 oop* obj = entry->object_addr(); 3363 3364 // has object been GC'ed 3365 if (!is_alive->do_object_b(entry->object())) { 3366 // grab the tag 3367 jlong tag = entry->tag(); 3368 guarantee(tag != 0, "checking"); 3369 3370 // remove GC'ed entry from hashmap and return the 3371 // entry to the free list 3372 hashmap->remove(prev, pos, entry); 3373 destroy_entry(entry); 3374 3375 // post the event to 
the profiler 3376 if (post_object_free) { 3377 JvmtiExport::post_object_free(env(), tag); 3378 } 3379 3380 ++freed; 3381 } else { 3382 f->do_oop(entry->object_addr()); 3383 oop new_oop = entry->object(); 3384 3385 // if the object has moved then re-hash it and move its 3386 // entry to its new location. 3387 unsigned int new_pos = JvmtiTagHashmap::hash(new_oop, size); 3388 if (new_pos != (unsigned int)pos) { 3389 if (prev == NULL) { 3390 table[pos] = next; 3391 } else { 3392 prev->set_next(next); 3393 } 3394 if (new_pos < (unsigned int)pos) { 3395 entry->set_next(table[new_pos]); 3396 table[new_pos] = entry; 3397 } else { 3398 // Delay adding this entry to it's new position as we'd end up 3399 // hitting it again during this iteration. 3400 entry->set_next(delayed_add); 3401 delayed_add = entry; 3402 } 3403 moved++; 3404 } else { 3405 // object didn't move 3406 prev = entry; 3407 } 3408 } 3409 3410 entry = next; 3411 } 3412 } 3413 3414 // Re-add all the entries which were kept aside 3415 while (delayed_add != NULL) { 3416 JvmtiTagHashmapEntry* next = delayed_add->next(); 3417 unsigned int pos = JvmtiTagHashmap::hash(delayed_add->object(), size); 3418 delayed_add->set_next(table[pos]); 3419 table[pos] = delayed_add; 3420 delayed_add = next; 3421 } 3422 3423 log_debug(jvmti, objecttagging)("(%d->%d, %d freed, %d total moves)", 3424 hashmap->_entry_count + freed, hashmap->_entry_count, freed, moved); 3425 }