1 /* 2 * Copyright (c) 2003, 2015, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "classfile/symbolTable.hpp" 27 #include "classfile/systemDictionary.hpp" 28 #include "classfile/vmSymbols.hpp" 29 #include "code/codeCache.hpp" 30 #include "jvmtifiles/jvmtiEnv.hpp" 31 #include "oops/instanceMirrorKlass.hpp" 32 #include "oops/objArrayKlass.hpp" 33 #include "oops/objArrayOop.inline.hpp" 34 #include "oops/oop.inline.hpp" 35 #include "prims/jvmtiEventController.hpp" 36 #include "prims/jvmtiEventController.inline.hpp" 37 #include "prims/jvmtiExport.hpp" 38 #include "prims/jvmtiImpl.hpp" 39 #include "prims/jvmtiTagMap.hpp" 40 #include "runtime/biasedLocking.hpp" 41 #include "runtime/javaCalls.hpp" 42 #include "runtime/jniHandles.hpp" 43 #include "runtime/mutex.hpp" 44 #include "runtime/mutexLocker.hpp" 45 #include "runtime/reflectionUtils.hpp" 46 #include "runtime/vframe.hpp" 47 #include "runtime/vmThread.hpp" 48 #include "runtime/vm_operations.hpp" 49 #include "services/serviceUtil.hpp" 50 #include "utilities/macros.hpp" 51 #if INCLUDE_ALL_GCS 52 #include "gc/parallel/parallelScavengeHeap.hpp" 53 #endif // INCLUDE_ALL_GCS 54 55 // JvmtiTagHashmapEntry 56 // 57 // Each entry encapsulates a reference to the tagged object 58 // and the tag value. In addition an entry includes a next pointer which 59 // is used to chain entries together. 
// JvmtiTagHashmapEntry
//
// Each entry encapsulates a reference to the tagged object
// and the tag value. In addition an entry includes a next pointer which
// is used to chain entries together.

class JvmtiTagHashmapEntry : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;

  oop _object;                          // tagged object
  jlong _tag;                           // the tag
  JvmtiTagHashmapEntry* _next;          // next entry on the same hash chain

  // (re)initialize this entry for the given object/tag pair - used by the
  // constructor and also when an entry is recycled from the free list
  inline void init(oop object, jlong tag) {
    _object = object;
    _tag = tag;
    _next = NULL;
  }

  // constructor
  JvmtiTagHashmapEntry(oop object, jlong tag) { init(object, tag); }

 public:

  // accessor methods
  inline oop object() const                         { return _object; }
  inline oop* object_addr()                         { return &_object; }
  inline jlong tag() const                          { return _tag; }

  inline void set_tag(jlong tag) {
    // a tag of zero means "untagged" - such entries are removed rather
    // than updated, so a zero value must never be stored here
    assert(tag != 0, "can't be zero");
    _tag = tag;
  }

  inline JvmtiTagHashmapEntry* next() const         { return _next; }
  inline void set_next(JvmtiTagHashmapEntry* next)  { _next = next; }
};


// JvmtiTagHashmap
//
// A hashmap is essentially a table of pointers to entries. Entries
// are hashed to a location, or position in the table, and then
// chained from that location. The "key" for hashing is the address of
// the object, or oop. The "value" is the tag value.
//
// A hashmap maintains a count of the number of entries in the hashmap
// and resizes if the number of entries exceeds a given threshold.
// The threshold is specified as a percentage of the size - for
// example a threshold of 0.75 will trigger the hashmap to resize
// if the number of entries is >75% of table size.
//
// A hashmap provides functions for adding, removing, and finding
// entries. It also provides a function to iterate over all entries
// in the hashmap.
class JvmtiTagHashmap : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;

  // thresholds at which memory-usage trace messages are printed; the
  // active threshold is advanced by one of these steps each time it is hit
  // (see compute_next_trace_threshold)
  enum {
    small_trace_threshold  = 10000,                  // threshold for tracing
    medium_trace_threshold = 100000,
    large_trace_threshold  = 1000000,
    initial_trace_threshold = small_trace_threshold
  };

  static int _sizes[];                  // array of possible hashmap sizes
  int _size;                            // actual size of the table
  int _size_index;                      // index into size table

  int _entry_count;                     // number of entries in the hashmap

  float _load_factor;                   // load factor as a % of the size
  int _resize_threshold;                // computed threshold to trigger resizing.
  bool _resizing_enabled;               // indicates if hashmap can resize

  int _trace_threshold;                 // threshold for trace messages (<0 disables tracing)

  JvmtiTagHashmapEntry** _table;        // the table of entries.

  // private accessors
  int resize_threshold() const                  { return _resize_threshold; }
  int trace_threshold() const                   { return _trace_threshold; }

  // initialize the hashmap: pick the table size from _sizes[size_index],
  // compute the resize threshold from the load factor, and allocate a
  // zeroed bucket table (allocation failure here is fatal).
  void init(int size_index=0, float load_factor=4.0f) {
    int initial_size = _sizes[size_index];
    _size_index = size_index;
    _size = initial_size;
    _entry_count = 0;
    if (TraceJVMTIObjectTagging) {
      _trace_threshold = initial_trace_threshold;
    } else {
      // negative threshold disables tracing
      _trace_threshold = -1;
    }
    _load_factor = load_factor;
    _resize_threshold = (int)(_load_factor * _size);
    _resizing_enabled = true;
    size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
    _table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (_table == NULL) {
      vm_exit_out_of_memory(s, OOM_MALLOC_ERROR,
        "unable to allocate initial hashtable for jvmti object tags");
    }
    for (int i=0; i<initial_size; i++) {
      _table[i] = NULL;
    }
  }

  // hash a given key (oop) with the specified size
  static unsigned int hash(oop key, int size) {
    // shift right to get better distribution (as these bits will be zero
    // with aligned addresses)
    unsigned int addr = (unsigned int)(cast_from_oop<intptr_t>(key));
#ifdef _LP64
    return (addr >> 3) % size;
#else
    return (addr >> 2) % size;
#endif
  }

  // hash a given key (oop)
  unsigned int hash(oop key) {
    return hash(key, _size);
  }

  // resize the hashmap - allocates a large table and re-hashes
  // all entries into the new table.
  void resize() {
    int new_size_index = _size_index+1;
    int new_size = _sizes[new_size_index];
    if (new_size < 0) {
      // hashmap already at maximum capacity (_sizes is -1 terminated)
      return;
    }

    // allocate new table
    size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
    JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (new_table == NULL) {
      // not fatal - keep the current table and stop trying to grow
      warning("unable to allocate larger hashtable for jvmti object tags");
      set_resizing_enabled(false);
      return;
    }

    // initialize new table
    int i;
    for (i=0; i<new_size; i++) {
      new_table[i] = NULL;
    }

    // rehash all entries into the new table
    for (i=0; i<_size; i++) {
      JvmtiTagHashmapEntry* entry = _table[i];
      while (entry != NULL) {
        JvmtiTagHashmapEntry* next = entry->next();
        oop key = entry->object();
        assert(key != NULL, "jni weak reference cleared!!");
        unsigned int h = hash(key, new_size);
        JvmtiTagHashmapEntry* anchor = new_table[h];
        if (anchor == NULL) {
          new_table[h] = entry;
          entry->set_next(NULL);
        } else {
          // prepend to the existing chain
          entry->set_next(anchor);
          new_table[h] = entry;
        }
        entry = next;
      }
    }

    // free old table and update settings.
    os::free((void*)_table);
    _table = new_table;
    _size_index = new_size_index;
    _size = new_size;

    // compute new resize threshold
    _resize_threshold = (int)(_load_factor * _size);
  }


  // internal remove function - remove an entry at a given position in the
  // table. The caller supplies the entry's predecessor on the chain (or
  // NULL if the entry is at the head of its chain).
  inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
    assert(pos >= 0 && pos < _size, "out of range");
    if (prev == NULL) {
      // entry is at the head of its chain
      _table[pos] = entry->next();
    } else {
      prev->set_next(entry->next());
    }
    assert(_entry_count > 0, "checking");
    _entry_count--;
  }

  // resizing switch
  bool is_resizing_enabled() const          { return _resizing_enabled; }
  void set_resizing_enabled(bool enable)    { _resizing_enabled = enable; }

  // debugging
  void print_memory_usage();
  void compute_next_trace_threshold();

 public:

  // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
  // The preferred size is rounded down to an actual size.
  JvmtiTagHashmap(int size, float load_factor=0.0f) {
    // find the first entry in _sizes that is >= the requested size;
    // if the request exceeds the largest size, use the largest (the
    // list is -1 terminated)
    int i=0;
    while (_sizes[i] < size) {
      if (_sizes[i] < 0) {
        assert(i > 0, "sanity check");
        i--;
        break;
      }
      i++;
    }

    // if a load factor is specified then use it, otherwise use default
    if (load_factor > 0.01f) {
      init(i, load_factor);
    } else {
      init(i);
    }
  }

  // create a JvmtiTagHashmap with default settings
  JvmtiTagHashmap() {
    init();
  }

  // release table when JvmtiTagHashmap destroyed
  ~JvmtiTagHashmap() {
    if (_table != NULL) {
      os::free((void*)_table);
      _table = NULL;
    }
  }

  // accessors
  int size() const                              { return _size; }
  JvmtiTagHashmapEntry** table() const          { return _table; }
  int entry_count() const                       { return _entry_count; }

  // find an entry in the hashmap, returns NULL if not found.
  inline JvmtiTagHashmapEntry* find(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    while (entry != NULL) {
      if (entry->object() == key) {
        return entry;
      }
      entry = entry->next();
    }
    return NULL;
  }


  // add a new entry to hashmap
  inline void add(oop key, JvmtiTagHashmapEntry* entry) {
    assert(key != NULL, "checking");
    assert(find(key) == NULL, "duplicate detected");
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* anchor = _table[h];
    if (anchor == NULL) {
      _table[h] = entry;
      entry->set_next(NULL);
    } else {
      // prepend to the existing chain
      entry->set_next(anchor);
      _table[h] = entry;
    }

    _entry_count++;
    if (trace_threshold() > 0 && entry_count() >= trace_threshold()) {
      assert(TraceJVMTIObjectTagging, "should only get here when tracing");
      print_memory_usage();
      compute_next_trace_threshold();
    }

    // if the number of entries exceed the threshold then resize
    if (entry_count() > resize_threshold() && is_resizing_enabled()) {
      resize();
    }
  }

  // remove an entry with the given key. Returns the unlinked entry
  // (caller takes ownership), or NULL if the key was not present.
  inline JvmtiTagHashmapEntry* remove(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    JvmtiTagHashmapEntry* prev = NULL;
    while (entry != NULL) {
      if (key == entry->object()) {
        break;
      }
      prev = entry;
      entry = entry->next();
    }
    if (entry != NULL) {
      remove(prev, h, entry);
    }
    return entry;
  }

  // iterate over all entries in the hashmap
  void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
};

// possible hashmap sizes - odd primes that roughly double in size.
// To avoid excessive resizing the odd primes from 4801-76831 and
// 76831-307261 have been removed. The list must be terminated by -1.
367 int JvmtiTagHashmap::_sizes[] = { 4801, 76831, 307261, 614563, 1228891, 368 2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 }; 369 370 371 // A supporting class for iterating over all entries in Hashmap 372 class JvmtiTagHashmapEntryClosure { 373 public: 374 virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0; 375 }; 376 377 378 // iterate over all entries in the hashmap 379 void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) { 380 for (int i=0; i<_size; i++) { 381 JvmtiTagHashmapEntry* entry = _table[i]; 382 JvmtiTagHashmapEntry* prev = NULL; 383 while (entry != NULL) { 384 // obtain the next entry before invoking do_entry - this is 385 // necessary because do_entry may remove the entry from the 386 // hashmap. 387 JvmtiTagHashmapEntry* next = entry->next(); 388 closure->do_entry(entry); 389 entry = next; 390 } 391 } 392 } 393 394 // debugging 395 void JvmtiTagHashmap::print_memory_usage() { 396 intptr_t p = (intptr_t)this; 397 tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p); 398 399 // table + entries in KB 400 int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) + 401 entry_count()*sizeof(JvmtiTagHashmapEntry))/K; 402 403 int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K); 404 tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]", 405 entry_count(), hashmap_usage, weak_globals_usage); 406 } 407 408 // compute threshold for the next trace message 409 void JvmtiTagHashmap::compute_next_trace_threshold() { 410 if (trace_threshold() < medium_trace_threshold) { 411 _trace_threshold += small_trace_threshold; 412 } else { 413 if (trace_threshold() < large_trace_threshold) { 414 _trace_threshold += medium_trace_threshold; 415 } else { 416 _trace_threshold += large_trace_threshold; 417 } 418 } 419 } 420 421 // create a JvmtiTagMap 422 JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) : 423 _env(env), 424 _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false), 425 _free_entries(NULL), 426 
_free_entries_count(0) 427 { 428 assert(JvmtiThreadState_lock->is_locked(), "sanity check"); 429 assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment"); 430 431 _hashmap = new JvmtiTagHashmap(); 432 433 // finally add us to the environment 434 ((JvmtiEnvBase *)env)->set_tag_map(this); 435 } 436 437 438 // destroy a JvmtiTagMap 439 JvmtiTagMap::~JvmtiTagMap() { 440 441 // no lock acquired as we assume the enclosing environment is 442 // also being destroryed. 443 ((JvmtiEnvBase *)_env)->set_tag_map(NULL); 444 445 JvmtiTagHashmapEntry** table = _hashmap->table(); 446 for (int j = 0; j < _hashmap->size(); j++) { 447 JvmtiTagHashmapEntry* entry = table[j]; 448 while (entry != NULL) { 449 JvmtiTagHashmapEntry* next = entry->next(); 450 delete entry; 451 entry = next; 452 } 453 } 454 455 // finally destroy the hashmap 456 delete _hashmap; 457 _hashmap = NULL; 458 459 // remove any entries on the free list 460 JvmtiTagHashmapEntry* entry = _free_entries; 461 while (entry != NULL) { 462 JvmtiTagHashmapEntry* next = entry->next(); 463 delete entry; 464 entry = next; 465 } 466 _free_entries = NULL; 467 } 468 469 // create a hashmap entry 470 // - if there's an entry on the (per-environment) free list then this 471 // is returned. Otherwise an new entry is allocated. 
472 JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(oop ref, jlong tag) { 473 assert(Thread::current()->is_VM_thread() || is_locked(), "checking"); 474 JvmtiTagHashmapEntry* entry; 475 if (_free_entries == NULL) { 476 entry = new JvmtiTagHashmapEntry(ref, tag); 477 } else { 478 assert(_free_entries_count > 0, "mismatched _free_entries_count"); 479 _free_entries_count--; 480 entry = _free_entries; 481 _free_entries = entry->next(); 482 entry->init(ref, tag); 483 } 484 return entry; 485 } 486 487 // destroy an entry by returning it to the free list 488 void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) { 489 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking"); 490 // limit the size of the free list 491 if (_free_entries_count >= max_free_entries) { 492 delete entry; 493 } else { 494 entry->set_next(_free_entries); 495 _free_entries = entry; 496 _free_entries_count++; 497 } 498 } 499 500 // returns the tag map for the given environments. If the tag map 501 // doesn't exist then it is created. 502 JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) { 503 JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map(); 504 if (tag_map == NULL) { 505 MutexLocker mu(JvmtiThreadState_lock); 506 tag_map = ((JvmtiEnvBase*)env)->tag_map(); 507 if (tag_map == NULL) { 508 tag_map = new JvmtiTagMap(env); 509 } 510 } else { 511 CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops()); 512 } 513 return tag_map; 514 } 515 516 // iterate over all entries in the tag map. 
517 void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) { 518 hashmap()->entry_iterate(closure); 519 } 520 521 // returns true if the hashmaps are empty 522 bool JvmtiTagMap::is_empty() { 523 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking"); 524 return hashmap()->entry_count() == 0; 525 } 526 527 528 // Return the tag value for an object, or 0 if the object is 529 // not tagged 530 // 531 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) { 532 JvmtiTagHashmapEntry* entry = tag_map->hashmap()->find(o); 533 if (entry == NULL) { 534 return 0; 535 } else { 536 return entry->tag(); 537 } 538 } 539 540 541 // A CallbackWrapper is a support class for querying and tagging an object 542 // around a callback to a profiler. The constructor does pre-callback 543 // work to get the tag value, klass tag value, ... and the destructor 544 // does the post-callback work of tagging or untagging the object. 545 // 546 // { 547 // CallbackWrapper wrapper(tag_map, o); 548 // 549 // (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...) 550 // 551 // } // wrapper goes out of scope here which results in the destructor 552 // checking to see if the object has been tagged, untagged, or the 553 // tag value has changed. 
//
class CallbackWrapper : public StackObj {
 private:
  JvmtiTagMap* _tag_map;
  JvmtiTagHashmap* _hashmap;
  JvmtiTagHashmapEntry* _entry;           // existing entry for _o, or NULL if untagged
  oop _o;
  jlong _obj_size;
  jlong _obj_tag;
  jlong _klass_tag;

 protected:
  JvmtiTagMap* tag_map() const { return _tag_map; }

  // invoked post-callback to tag, untag, or update the tag of an object
  void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
                                       JvmtiTagHashmapEntry* entry, jlong obj_tag);
 public:
  CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
    assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
           "MT unsafe or must be VM thread");

    // object to tag
    _o = o;

    // object size
    _obj_size = (jlong)_o->size() * wordSize;

    // record the context
    _tag_map = tag_map;
    _hashmap = tag_map->hashmap();
    _entry = _hashmap->find(_o);

    // get object tag
    _obj_tag = (_entry == NULL) ? 0 : _entry->tag();

    // get the class and the class's tag value
    assert(InstanceKlass::cast(SystemDictionary::Class_klass())->is_mirror_instance_klass(), "Is not?");

    _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
  }

  ~CallbackWrapper() {
    post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
  }

  inline jlong* obj_tag_p()                     { return &_obj_tag; }
  inline jlong obj_size() const                 { return _obj_size; }
  inline jlong obj_tag() const                  { return _obj_tag; }
  inline jlong klass_tag() const                { return _klass_tag; }
};



// callback post-callback to tag, untag, or update the tag of an object
void inline CallbackWrapper::post_callback_tag_update(oop o,
                                                      JvmtiTagHashmap* hashmap,
                                                      JvmtiTagHashmapEntry* entry,
                                                      jlong obj_tag) {
  if (entry == NULL) {
    if (obj_tag != 0) {
      // callback has tagged the object
      assert(Thread::current()->is_VM_thread(), "must be VMThread");
      entry = tag_map()->create_entry(o, obj_tag);
      hashmap->add(o, entry);
    }
  } else {
    // object was previously tagged - the callback may have untagged
    // the object or changed the tag value
    if (obj_tag == 0) {

      JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
      assert(entry_removed == entry, "checking");
      tag_map()->destroy_entry(entry);

    } else {
      if (obj_tag != entry->tag()) {
        entry->set_tag(obj_tag);
      }
    }
  }
}

// An extended CallbackWrapper used when reporting an object reference
// to the agent.
//
// {
//   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
//
//   (*callback)(wrapper.klass_tag(),
//               wrapper.obj_size(),
//               wrapper.obj_tag_p()
//               wrapper.referrer_tag_p(), ...)
//
// } // wrapper goes out of scope here which results in the destructor
//   // checking to see if the referrer object has been tagged, untagged,
//   // or the tag value has changed.
//
class TwoOopCallbackWrapper : public CallbackWrapper {
 private:
  bool _is_reference_to_self;
  JvmtiTagHashmap* _referrer_hashmap;
  JvmtiTagHashmapEntry* _referrer_entry;
  oop _referrer;
  jlong _referrer_obj_tag;
  jlong _referrer_klass_tag;
  jlong* _referrer_tag_p;

  bool is_reference_to_self() const             { return _is_reference_to_self; }

 public:
  TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
    CallbackWrapper(tag_map, o)
  {
    // self reference needs to be handled in a special way: the base class
    // already tracks o, so the referrer fields just alias the object's state
    _is_reference_to_self = (referrer == o);

    if (_is_reference_to_self) {
      _referrer_klass_tag = klass_tag();
      _referrer_tag_p = obj_tag_p();
    } else {
      _referrer = referrer;
      // record the context
      _referrer_hashmap = tag_map->hashmap();
      _referrer_entry = _referrer_hashmap->find(_referrer);

      // get object tag
      _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
      _referrer_tag_p = &_referrer_obj_tag;

      // get referrer class tag.
      _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
    }
  }

  ~TwoOopCallbackWrapper() {
    if (!is_reference_to_self()){
      // propagate any tag change the callback made on the referrer
      // (a self reference is already handled by the base destructor)
      post_callback_tag_update(_referrer,
                               _referrer_hashmap,
                               _referrer_entry,
                               _referrer_obj_tag);
    }
  }

  // address of referrer tag
  // (for a self reference this will return the same thing as obj_tag_p())
  inline jlong* referrer_tag_p()        { return _referrer_tag_p; }

  // referrer's class tag
  inline jlong referrer_klass_tag()     { return _referrer_klass_tag; }
};

// tag an object
//
// This function is performance critical. If many threads attempt to tag objects
// around the same time then it's possible that the Mutex associated with the
// tag map will be a hot lock.
void JvmtiTagMap::set_tag(jobject object, jlong tag) {
  MutexLocker ml(lock());

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  // see if the object is already tagged
  JvmtiTagHashmap* hashmap = _hashmap;
  JvmtiTagHashmapEntry* entry = hashmap->find(o);

  // if the object is not already tagged then we tag it
  if (entry == NULL) {
    if (tag != 0) {
      entry = create_entry(o, tag);
      hashmap->add(o, entry);
    } else {
      // no-op: untagging an object that carries no tag
    }
  } else {
    // if the object is already tagged then we either update
    // the tag (if a new tag value has been provided)
    // or remove the object if the new tag value is 0.
    if (tag == 0) {
      hashmap->remove(o);
      destroy_entry(entry);
    } else {
      entry->set_tag(tag);
    }
  }
}

// get the tag for an object
jlong JvmtiTagMap::get_tag(jobject object) {
  MutexLocker ml(lock());

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  return tag_for(this, o);
}


// Helper class used to describe the static or instance fields of a class.
// For each field it holds the field index (as defined by the JVMTI specification),
// the field type, and the offset.

class ClassFieldDescriptor: public CHeapObj<mtInternal> {
 private:
  int _field_index;
  int _field_offset;
  char _field_type;
 public:
  ClassFieldDescriptor(int index, char type, int offset) :
    _field_index(index), _field_type(type), _field_offset(offset) {
  }
  int field_index()  const  { return _field_index; }
  char field_type()  const  { return _field_type; }
  int field_offset() const  { return _field_offset; }
};

// An ordered collection of ClassFieldDescriptors for one class, built by
// the create_map_of_* factory functions below.
class ClassFieldMap: public CHeapObj<mtInternal> {
 private:
  enum {
    initial_field_count = 5
  };

  // list of field descriptors
  GrowableArray<ClassFieldDescriptor*>* _fields;

  // constructor
  ClassFieldMap();

  // add a field
  void add(int index, char type, int offset);

  // returns the field count for the given class
  static int compute_field_count(instanceKlassHandle ikh);

 public:
  ~ClassFieldMap();

  // access
  int field_count()                     { return _fields->length(); }
  ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }

  // functions to create maps of static or instance fields
  static ClassFieldMap* create_map_of_static_fields(Klass* k);
  static ClassFieldMap* create_map_of_instance_fields(oop obj);
};

ClassFieldMap::ClassFieldMap() {
  _fields = new (ResourceObj::C_HEAP, mtInternal)
    GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
}

ClassFieldMap::~ClassFieldMap() {
  // the map owns its descriptors
  for (int i=0; i<_fields->length(); i++) {
    delete _fields->at(i);
  }
  delete _fields;
}

void ClassFieldMap::add(int index, char type, int offset) {
  ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
  _fields->append(field);
}

// Returns a heap allocated ClassFieldMap to describe the static fields
// of the given class.
//
ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
  HandleMark hm;
  instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), k);

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  // count the (filtered) fields so JVMTI field indices can be computed -
  // index 0 corresponds to the last field
  FilteredFieldStream f(ikh, false, false);
  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ikh, true, true); !fld.eos(); fld.next(), index++) {
    // ignore instance fields
    if (!fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
  }
  return field_map;
}

// Returns a heap allocated ClassFieldMap to describe the instance fields
// of the given class. All instance fields are included (this means public
// and private fields declared in superclasses and superinterfaces too).
845 // 846 ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) { 847 HandleMark hm; 848 instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), obj->klass()); 849 850 // create the field map 851 ClassFieldMap* field_map = new ClassFieldMap(); 852 853 FilteredFieldStream f(ikh, false, false); 854 855 int max_field_index = f.field_count()-1; 856 857 int index = 0; 858 for (FilteredFieldStream fld(ikh, false, false); !fld.eos(); fld.next(), index++) { 859 // ignore static fields 860 if (fld.access_flags().is_static()) { 861 continue; 862 } 863 field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset()); 864 } 865 866 return field_map; 867 } 868 869 // Helper class used to cache a ClassFileMap for the instance fields of 870 // a cache. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during 871 // heap iteration and avoid creating a field map for each object in the heap 872 // (only need to create the map when the first instance of a class is encountered). 
//
class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
 private:
  enum {
    initial_class_count = 200
  };
  ClassFieldMap* _field_map;            // owned; deleted by the destructor

  ClassFieldMap* field_map() const          { return _field_map; }

  JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
  ~JvmtiCachedClassFieldMap();

  // list of all InstanceKlasses currently holding a cached map, so the
  // cache can be cleared at the end of the VM operation
  static GrowableArray<InstanceKlass*>* _class_list;
  static void add_to_class_list(InstanceKlass* ik);

 public:
  // returns the field map for a given object (returning map cached
  // by InstanceKlass if possible)
  static ClassFieldMap* get_map_of_instance_fields(oop obj);

  // removes the field map from all instanceKlasses - should be
  // called before VM operation completes
  static void clear_cache();

  // returns the number of ClassFieldMap cached by instanceKlasses
  static int cached_field_map_count();
};

GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;

JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
  _field_map = field_map;
}

JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
  if (_field_map != NULL) {
    delete _field_map;
  }
}

// Marker class to ensure that the class file map cache is only used in a defined
// scope.
class ClassFieldMapCacheMark : public StackObj {
 private:
   static bool _is_active;
 public:
   ClassFieldMapCacheMark() {
     assert(Thread::current()->is_VM_thread(), "must be VMThread");
     assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
     assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
     _is_active = true;
   }
   ~ClassFieldMapCacheMark() {
     // destructor guarantees the cache is cleared when the scope exits
     JvmtiCachedClassFieldMap::clear_cache();
     _is_active = false;
   }
   static bool is_active() { return _is_active; }
};

bool ClassFieldMapCacheMark::_is_active;


// record that the given InstanceKlass is caching a field map
void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
  if (_class_list == NULL) {
    // lazily create the list on first use
    _class_list = new (ResourceObj::C_HEAP, mtInternal)
      GrowableArray<InstanceKlass*>(initial_class_count, true);
  }
  _class_list->push(ik);
}

// returns the instance field map for the given object
// (returns field map cached by the InstanceKlass if possible)
ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");

  Klass* k = obj->klass();
  InstanceKlass* ik = InstanceKlass::cast(k);

  // return cached map if possible
  JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
  if (cached_map != NULL) {
    assert(cached_map->field_map() != NULL, "missing field list");
    return cached_map->field_map();
  } else {
    // first instance of this class seen - create the map, cache it on
    // the klass, and remember the klass so the cache can be cleared later
    ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
    cached_map = new JvmtiCachedClassFieldMap(field_map);
    ik->set_jvmti_cached_class_field_map(cached_map);
    add_to_class_list(ik);
    return field_map;
  }
}

// remove the field maps cached from all instanceKlasses
void JvmtiCachedClassFieldMap::clear_cache() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  if (_class_list != NULL) {
    for (int i = 0; i < _class_list->length(); i++) {
      InstanceKlass* ik = _class_list->at(i);
      JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
      assert(cached_map != NULL, "should not be NULL");
      ik->set_jvmti_cached_class_field_map(NULL);
      delete cached_map; // deletes the encapsulated field map
    }
    delete _class_list;
    _class_list = NULL;
  }
}

// returns the number of ClassFieldMap cached by instanceKlasses
int JvmtiCachedClassFieldMap::cached_field_map_count() {
  return (_class_list == NULL) ? 0 : _class_list->length();
}

// helper function to indicate if an object is filtered by its tag or class tag
static inline bool is_filtered_by_heap_filter(jlong obj_tag,
                                              jlong klass_tag,
                                              int heap_filter) {
  // apply the heap filter
  if (obj_tag != 0) {
    // filter out tagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
  } else {
    // filter out untagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
  }
  if (klass_tag != 0) {
    // filter out objects with tagged classes
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
  } else {
    // filter out objects with untagged classes.
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
  }
  return false;
}

// helper function to indicate if an object is filtered by a klass filter
static inline bool is_filtered_by_klass_filter(oop obj, KlassHandle klass_filter) {
  // a null filter accepts every class
  if (!klass_filter.is_null()) {
    if (obj->klass() != klass_filter()) {
      return true;
    }
  }
  return false;
}

// helper function to tell if a field is a primitive field or not
static inline bool is_primitive_field_type(char type) {
  // 'L' (object) and '[' (array) are the only non-primitive signatures
  return (type != 'L' && type != '[');
}

// helper function to copy the value from location addr to jvalue.
static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
  switch (value_type) {
    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
    case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
    default: ShouldNotReachHere();
  }
}

// helper function to invoke string primitive value callback
// returns visit control flags
static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
                                         CallbackWrapper* wrapper,
                                         oop str,
                                         void* user_data)
{
  assert(str->klass() == SystemDictionary::String_klass(), "not a string");

  typeArrayOop s_value = java_lang_String::value(str);

  // JDK-6584008: the value field may be null if a String instance is
  // partially constructed.
  if (s_value == NULL) {
    return 0;
  }
  // get the string value and length
  // (string value may be offset from the base)
  int s_len = java_lang_String::length(str);
  bool is_latin1 = java_lang_String::is_latin1(str);
  jchar* value;
  if (s_len > 0) {
    if (!is_latin1) {
      value = s_value->char_at_addr(0);
    } else {
      // Inflate latin1 encoded string to UTF16
      jchar* buf = NEW_C_HEAP_ARRAY(jchar, s_len, mtInternal);
      for (int i = 0; i < s_len; i++) {
        buf[i] = ((jchar) s_value->byte_at(i)) & 0xff;
      }
      value = &buf[0];
    }
  } else {
    // Don't use char_at_addr(0) if length is 0
    value = (jchar*) s_value->base(T_CHAR);
  }

  // invoke the callback
  jint res = (*cb)(wrapper->klass_tag(),
                   wrapper->obj_size(),
                   wrapper->obj_tag_p(),
                   value,
                   (jint)s_len,
                   user_data);

  // free the temporary inflation buffer, if one was created
  if (is_latin1 && s_len > 0) {
    FREE_C_HEAP_ARRAY(jchar, value);
  }
  return res;
}

// helper function to invoke array primitive value callback
// returns visit control flags
static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
                                                  CallbackWrapper* wrapper,
                                                  oop obj,
                                                  void* user_data)
{
  assert(obj->is_typeArray(), "not a primitive array");

  // get base address of first element
  typeArrayOop array = typeArrayOop(obj);
  BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
  void* elements = array->base(type);

  // jvmtiPrimitiveType is defined so this mapping is always correct
  jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);

  return (*cb)(wrapper->klass_tag(),
               wrapper->obj_size(),
               wrapper->obj_tag_p(),
               (jint)array->length(),
               elem_type,
               elements,
               user_data);
}

// helper function to invoke the primitive field callback for all static fields
// of a given class
static
jint invoke_primitive_field_callback_for_static_fields 1121 (CallbackWrapper* wrapper, 1122 oop obj, 1123 jvmtiPrimitiveFieldCallback cb, 1124 void* user_data) 1125 { 1126 // for static fields only the index will be set 1127 static jvmtiHeapReferenceInfo reference_info = { 0 }; 1128 1129 assert(obj->klass() == SystemDictionary::Class_klass(), "not a class"); 1130 if (java_lang_Class::is_primitive(obj)) { 1131 return 0; 1132 } 1133 Klass* klass = java_lang_Class::as_Klass(obj); 1134 1135 // ignore classes for object and type arrays 1136 if (!klass->oop_is_instance()) { 1137 return 0; 1138 } 1139 1140 // ignore classes which aren't linked yet 1141 InstanceKlass* ik = InstanceKlass::cast(klass); 1142 if (!ik->is_linked()) { 1143 return 0; 1144 } 1145 1146 // get the field map 1147 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass); 1148 1149 // invoke the callback for each static primitive field 1150 for (int i=0; i<field_map->field_count(); i++) { 1151 ClassFieldDescriptor* field = field_map->field_at(i); 1152 1153 // ignore non-primitive fields 1154 char type = field->field_type(); 1155 if (!is_primitive_field_type(type)) { 1156 continue; 1157 } 1158 // one-to-one mapping 1159 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type; 1160 1161 // get offset and field value 1162 int offset = field->field_offset(); 1163 address addr = (address)klass->java_mirror() + offset; 1164 jvalue value; 1165 copy_to_jvalue(&value, addr, value_type); 1166 1167 // field index 1168 reference_info.field.index = field->field_index(); 1169 1170 // invoke the callback 1171 jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD, 1172 &reference_info, 1173 wrapper->klass_tag(), 1174 wrapper->obj_tag_p(), 1175 value, 1176 value_type, 1177 user_data); 1178 if (res & JVMTI_VISIT_ABORT) { 1179 delete field_map; 1180 return res; 1181 } 1182 } 1183 1184 delete field_map; 1185 return 0; 1186 } 1187 1188 // helper function to invoke the primitive field callback for all 
// helper function to invoke the primitive field callback for all instance fields
// of a given object
static jint invoke_primitive_field_callback_for_instance_fields(
  CallbackWrapper* wrapper,
  oop obj,
  jvmtiPrimitiveFieldCallback cb,
  void* user_data)
{
  // for instance fields only the index will be set
  // (static is safe here: only the VM thread runs this, during a VM op)
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  // get the map of the instance fields
  ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);

  // invoke the callback for each instance primitive field
  for (int i=0; i<fields->field_count(); i++) {
    ClassFieldDescriptor* field = fields->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value (instance fields are at an offset from the oop)
    int offset = field->field_offset();
    address addr = (address)obj + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      return res;
    }
  }
  return 0;
}


// VM operation to iterate over all objects in the heap (both reachable
// and unreachable)
class VM_HeapIterateOperation: public VM_Operation {
 private:
  ObjectClosure* _blk;                    // closure applied to every object
 public:
  VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }

  VMOp_Type type() const { return VMOp_HeapIterateOperation; }
  void doit() {
    // allows class files maps to be cached during iteration
    ClassFieldMapCacheMark cm;

    // make sure that heap is parsable (fills TLABs with filler objects)
    Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

    // Verify heap before iteration - if the heap gets corrupted then
    // JVMTI's IterateOverHeap will crash.
    if (VerifyBeforeIteration) {
      Universe::verify();
    }

    // do the iteration
    // If this operation encounters a bad object when using CMS,
    // consider using safe_object_iterate() which avoids perm gen
    // objects that may contain bad references.
    Universe::heap()->object_iterate(_blk);
  }

};


// An ObjectClosure used to support the deprecated IterateOverHeap and
// IterateOverInstancesOfClass functions
class IterateOverHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;                           // tag map of the requesting env
  KlassHandle _klass;                              // optional klass filter
  jvmtiHeapObjectFilter _object_filter;            // tagged/untagged filter
  jvmtiHeapObjectCallback _heap_object_callback;   // agent callback
  const void* _user_data;                          // opaque agent data

  // accessors
  JvmtiTagMap* tag_map() const                    { return _tag_map; }
  jvmtiHeapObjectFilter object_filter() const     { return _object_filter; }
  jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
  KlassHandle klass() const                       { return _klass; }
  const void* user_data() const                   { return _user_data; }

  // indicates if iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const               { return _iteration_aborted; }
  void set_iteration_aborted(bool aborted)        { _iteration_aborted = aborted; }

 public:
  IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
                               KlassHandle klass,
                               jvmtiHeapObjectFilter object_filter,
                               jvmtiHeapObjectCallback heap_object_callback,
                               const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _object_filter(object_filter),
    _heap_object_callback(heap_object_callback),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  void do_object(oop o);
};

// invoked for each object in the heap
each object in the heap 1312 void IterateOverHeapObjectClosure::do_object(oop o) { 1313 // check if iteration has been halted 1314 if (is_iteration_aborted()) return; 1315 1316 // ignore any objects that aren't visible to profiler 1317 if (!ServiceUtil::visible_oop(o)) return; 1318 1319 // instanceof check when filtering by klass 1320 if (!klass().is_null() && !o->is_a(klass()())) { 1321 return; 1322 } 1323 // prepare for the calllback 1324 CallbackWrapper wrapper(tag_map(), o); 1325 1326 // if the object is tagged and we're only interested in untagged objects 1327 // then don't invoke the callback. Similiarly, if the object is untagged 1328 // and we're only interested in tagged objects we skip the callback. 1329 if (wrapper.obj_tag() != 0) { 1330 if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return; 1331 } else { 1332 if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return; 1333 } 1334 1335 // invoke the agent's callback 1336 jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(), 1337 wrapper.obj_size(), 1338 wrapper.obj_tag_p(), 1339 (void*)user_data()); 1340 if (control == JVMTI_ITERATION_ABORT) { 1341 set_iteration_aborted(true); 1342 } 1343 } 1344 1345 // An ObjectClosure used to support the IterateThroughHeap function 1346 class IterateThroughHeapObjectClosure: public ObjectClosure { 1347 private: 1348 JvmtiTagMap* _tag_map; 1349 KlassHandle _klass; 1350 int _heap_filter; 1351 const jvmtiHeapCallbacks* _callbacks; 1352 const void* _user_data; 1353 1354 // accessor functions 1355 JvmtiTagMap* tag_map() const { return _tag_map; } 1356 int heap_filter() const { return _heap_filter; } 1357 const jvmtiHeapCallbacks* callbacks() const { return _callbacks; } 1358 KlassHandle klass() const { return _klass; } 1359 const void* user_data() const { return _user_data; } 1360 1361 // indicates if the iteration has been aborted 1362 bool _iteration_aborted; 1363 bool is_iteration_aborted() const { return _iteration_aborted; } 1364 1365 // used to 
check the visit control flags. If the abort flag is set 1366 // then we set the iteration aborted flag so that the iteration completes 1367 // without processing any further objects 1368 bool check_flags_for_abort(jint flags) { 1369 bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0; 1370 if (is_abort) { 1371 _iteration_aborted = true; 1372 } 1373 return is_abort; 1374 } 1375 1376 public: 1377 IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map, 1378 KlassHandle klass, 1379 int heap_filter, 1380 const jvmtiHeapCallbacks* heap_callbacks, 1381 const void* user_data) : 1382 _tag_map(tag_map), 1383 _klass(klass), 1384 _heap_filter(heap_filter), 1385 _callbacks(heap_callbacks), 1386 _user_data(user_data), 1387 _iteration_aborted(false) 1388 { 1389 } 1390 1391 void do_object(oop o); 1392 }; 1393 1394 // invoked for each object in the heap 1395 void IterateThroughHeapObjectClosure::do_object(oop obj) { 1396 // check if iteration has been halted 1397 if (is_iteration_aborted()) return; 1398 1399 // ignore any objects that aren't visible to profiler 1400 if (!ServiceUtil::visible_oop(obj)) return; 1401 1402 // apply class filter 1403 if (is_filtered_by_klass_filter(obj, klass())) return; 1404 1405 // prepare for callback 1406 CallbackWrapper wrapper(tag_map(), obj); 1407 1408 // check if filtered by the heap filter 1409 if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) { 1410 return; 1411 } 1412 1413 // for arrays we need the length, otherwise -1 1414 bool is_array = obj->is_array(); 1415 int len = is_array ? 
arrayOop(obj)->length() : -1; 1416 1417 // invoke the object callback (if callback is provided) 1418 if (callbacks()->heap_iteration_callback != NULL) { 1419 jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback; 1420 jint res = (*cb)(wrapper.klass_tag(), 1421 wrapper.obj_size(), 1422 wrapper.obj_tag_p(), 1423 (jint)len, 1424 (void*)user_data()); 1425 if (check_flags_for_abort(res)) return; 1426 } 1427 1428 // for objects and classes we report primitive fields if callback provided 1429 if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) { 1430 jint res; 1431 jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback; 1432 if (obj->klass() == SystemDictionary::Class_klass()) { 1433 res = invoke_primitive_field_callback_for_static_fields(&wrapper, 1434 obj, 1435 cb, 1436 (void*)user_data()); 1437 } else { 1438 res = invoke_primitive_field_callback_for_instance_fields(&wrapper, 1439 obj, 1440 cb, 1441 (void*)user_data()); 1442 } 1443 if (check_flags_for_abort(res)) return; 1444 } 1445 1446 // string callback 1447 if (!is_array && 1448 callbacks()->string_primitive_value_callback != NULL && 1449 obj->klass() == SystemDictionary::String_klass()) { 1450 jint res = invoke_string_value_callback( 1451 callbacks()->string_primitive_value_callback, 1452 &wrapper, 1453 obj, 1454 (void*)user_data() ); 1455 if (check_flags_for_abort(res)) return; 1456 } 1457 1458 // array callback 1459 if (is_array && 1460 callbacks()->array_primitive_value_callback != NULL && 1461 obj->is_typeArray()) { 1462 jint res = invoke_array_primitive_value_callback( 1463 callbacks()->array_primitive_value_callback, 1464 &wrapper, 1465 obj, 1466 (void*)user_data() ); 1467 if (check_flags_for_abort(res)) return; 1468 } 1469 }; 1470 1471 1472 // Deprecated function to iterate over all objects in the heap 1473 void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter, 1474 KlassHandle klass, 1475 jvmtiHeapObjectCallback heap_object_callback, 1476 
const void* user_data) 1477 { 1478 MutexLocker ml(Heap_lock); 1479 IterateOverHeapObjectClosure blk(this, 1480 klass, 1481 object_filter, 1482 heap_object_callback, 1483 user_data); 1484 VM_HeapIterateOperation op(&blk); 1485 VMThread::execute(&op); 1486 } 1487 1488 1489 // Iterates over all objects in the heap 1490 void JvmtiTagMap::iterate_through_heap(jint heap_filter, 1491 KlassHandle klass, 1492 const jvmtiHeapCallbacks* callbacks, 1493 const void* user_data) 1494 { 1495 MutexLocker ml(Heap_lock); 1496 IterateThroughHeapObjectClosure blk(this, 1497 klass, 1498 heap_filter, 1499 callbacks, 1500 user_data); 1501 VM_HeapIterateOperation op(&blk); 1502 VMThread::execute(&op); 1503 } 1504 1505 // support class for get_objects_with_tags 1506 1507 class TagObjectCollector : public JvmtiTagHashmapEntryClosure { 1508 private: 1509 JvmtiEnv* _env; 1510 jlong* _tags; 1511 jint _tag_count; 1512 1513 GrowableArray<jobject>* _object_results; // collected objects (JNI weak refs) 1514 GrowableArray<uint64_t>* _tag_results; // collected tags 1515 1516 public: 1517 TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) { 1518 _env = env; 1519 _tags = (jlong*)tags; 1520 _tag_count = tag_count; 1521 _object_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<jobject>(1,true); 1522 _tag_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<uint64_t>(1,true); 1523 } 1524 1525 ~TagObjectCollector() { 1526 delete _object_results; 1527 delete _tag_results; 1528 } 1529 1530 // for each tagged object check if the tag value matches 1531 // - if it matches then we create a JNI local reference to the object 1532 // and record the reference and tag value. 
1533 // 1534 void do_entry(JvmtiTagHashmapEntry* entry) { 1535 for (int i=0; i<_tag_count; i++) { 1536 if (_tags[i] == entry->tag()) { 1537 oop o = entry->object(); 1538 assert(o != NULL && Universe::heap()->is_in_reserved(o), "sanity check"); 1539 jobject ref = JNIHandles::make_local(JavaThread::current(), o); 1540 _object_results->append(ref); 1541 _tag_results->append((uint64_t)entry->tag()); 1542 } 1543 } 1544 } 1545 1546 // return the results from the collection 1547 // 1548 jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) { 1549 jvmtiError error; 1550 int count = _object_results->length(); 1551 assert(count >= 0, "sanity check"); 1552 1553 // if object_result_ptr is not NULL then allocate the result and copy 1554 // in the object references. 1555 if (object_result_ptr != NULL) { 1556 error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr); 1557 if (error != JVMTI_ERROR_NONE) { 1558 return error; 1559 } 1560 for (int i=0; i<count; i++) { 1561 (*object_result_ptr)[i] = _object_results->at(i); 1562 } 1563 } 1564 1565 // if tag_result_ptr is not NULL then allocate the result and copy 1566 // in the tag values. 
1567 if (tag_result_ptr != NULL) { 1568 error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr); 1569 if (error != JVMTI_ERROR_NONE) { 1570 if (object_result_ptr != NULL) { 1571 _env->Deallocate((unsigned char*)object_result_ptr); 1572 } 1573 return error; 1574 } 1575 for (int i=0; i<count; i++) { 1576 (*tag_result_ptr)[i] = (jlong)_tag_results->at(i); 1577 } 1578 } 1579 1580 *count_ptr = count; 1581 return JVMTI_ERROR_NONE; 1582 } 1583 }; 1584 1585 // return the list of objects with the specified tags 1586 jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags, 1587 jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) { 1588 1589 TagObjectCollector collector(env(), tags, count); 1590 { 1591 // iterate over all tagged objects 1592 MutexLocker ml(lock()); 1593 entry_iterate(&collector); 1594 } 1595 return collector.result(count_ptr, object_result_ptr, tag_result_ptr); 1596 } 1597 1598 1599 // ObjectMarker is used to support the marking objects when walking the 1600 // heap. 1601 // 1602 // This implementation uses the existing mark bits in an object for 1603 // marking. Objects that are marked must later have their headers restored. 1604 // As most objects are unlocked and don't have their identity hash computed 1605 // we don't have to save their headers. Instead we save the headers that 1606 // are "interesting". Later when the headers are restored this implementation 1607 // restores all headers to their initial value and then restores the few 1608 // objects that had interesting headers. 1609 // 1610 // Future work: This implementation currently uses growable arrays to save 1611 // the oop and header of interesting objects. As an optimization we could 1612 // use the same technique as the GC and make use of the unused area 1613 // between top() and end(). 
//

// An ObjectClosure used to restore the mark bits of an object
class RestoreMarksClosure : public ObjectClosure {
 public:
  void do_object(oop o) {
    if (o != NULL) {
      markOop mark = o->mark();
      if (mark->is_marked()) {
        // reset the header to the "unmarked" prototype value; headers that
        // were "interesting" are restored afterwards by ObjectMarker::done()
        o->init_mark();
      }
    }
  }
};

// ObjectMarker provides the mark and visited functions
class ObjectMarker : AllStatic {
 private:
  // saved headers
  static GrowableArray<oop>* _saved_oop_stack;
  static GrowableArray<markOop>* _saved_mark_stack;
  static bool _needs_reset;                  // do we need to reset mark bits?

 public:
  static void init();                       // initialize
  static void done();                       // clean-up

  static inline void mark(oop o);           // mark an object
  static inline bool visited(oop o);        // check if object has been visited

  static inline bool needs_reset()            { return _needs_reset; }
  static inline void set_needs_reset(bool v)  { _needs_reset = v; }
};

GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
bool ObjectMarker::_needs_reset = true;  // need to reset mark bits by default

// initialize ObjectMarker - prepares for object marking
void ObjectMarker::init() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");

  // prepare heap for iteration
  Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

  // create stacks for interesting headers
  _saved_mark_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<markOop>(4000, true);
  _saved_oop_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(4000, true);

  if (UseBiasedLocking) {
    BiasedLocking::preserve_marks();
  }
}

// Object marking is done so restore object headers
void ObjectMarker::done() {
  // iterate over all objects and restore the mark bits to
  // their initial value
  RestoreMarksClosure blk;
  if (needs_reset()) {
    Universe::heap()->object_iterate(&blk);
  } else {
    // We don't need to reset mark bits on this call, but reset the
    // flag to the default for the next call.
    set_needs_reset(true);
  }

  // now restore the interesting headers
  for (int i = 0; i < _saved_oop_stack->length(); i++) {
    oop o = _saved_oop_stack->at(i);
    markOop mark = _saved_mark_stack->at(i);
    o->set_mark(mark);
  }

  if (UseBiasedLocking) {
    BiasedLocking::restore_marks();
  }

  // free the stacks
  delete _saved_oop_stack;
  delete _saved_mark_stack;
}

// mark an object
inline void ObjectMarker::mark(oop o) {
  assert(Universe::heap()->is_in(o), "sanity check");
  assert(!o->mark()->is_marked(), "should only mark an object once");

  // object's mark word
  markOop mark = o->mark();

  // save the header if it cannot be trivially reconstructed
  // (e.g. locked or has an identity hash)
  if (mark->must_be_preserved(o)) {
    _saved_mark_stack->push(mark);
    _saved_oop_stack->push(o);
  }

  // mark the object
  o->set_mark(markOopDesc::prototype()->set_marked());
}

// return true if object is marked
inline bool ObjectMarker::visited(oop o) {
  return o->mark()->is_marked();
}

// Stack allocated class to help ensure that ObjectMarker is used
// correctly. Constructor initializes ObjectMarker, destructor calls
// ObjectMarker's done() function to restore object headers.
class ObjectMarkerController : public StackObj {
 public:
  ObjectMarkerController() {
    ObjectMarker::init();
  }
  ~ObjectMarkerController() {
    ObjectMarker::done();
  }
};


// helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
// (not performance critical as only used for roots)
static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
  switch (kind) {
    case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
    case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
    case JVMTI_HEAP_REFERENCE_MONITOR:      return JVMTI_HEAP_ROOT_MONITOR;
    case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
    case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
    case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
    case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
    default: ShouldNotReachHere();          return JVMTI_HEAP_ROOT_OTHER;
  }
}

// Base class for all heap walk contexts. The base class maintains a flag
// to indicate if the context is valid or not.
class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
 private:
  bool _valid;
 public:
  HeapWalkContext(bool valid) { _valid = valid; }
  void invalidate() { _valid = false; }
  bool is_valid() const { return _valid; }
};

// A basic heap walk context for the deprecated heap walking functions.
// The context for a basic heap walk are the callbacks and fields used by
// the referrer caching scheme.
class BasicHeapWalkContext: public HeapWalkContext {
 private:
  jvmtiHeapRootCallback _heap_root_callback;
  jvmtiStackReferenceCallback _stack_ref_callback;
  jvmtiObjectReferenceCallback _object_ref_callback;

  // used for caching
  oop _last_referrer;
  jlong _last_referrer_tag;

 public:
  // default constructor builds an invalid context (see HeapWalkContext)
  BasicHeapWalkContext() : HeapWalkContext(false) { }

  BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
                       jvmtiStackReferenceCallback stack_ref_callback,
                       jvmtiObjectReferenceCallback object_ref_callback) :
    HeapWalkContext(true),
    _heap_root_callback(heap_root_callback),
    _stack_ref_callback(stack_ref_callback),
    _object_ref_callback(object_ref_callback),
    _last_referrer(NULL),
    _last_referrer_tag(0) {
  }

  // accessors
  jvmtiHeapRootCallback heap_root_callback() const         { return _heap_root_callback; }
  jvmtiStackReferenceCallback stack_ref_callback() const   { return _stack_ref_callback; }
  jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback; }

  oop last_referrer() const                   { return _last_referrer; }
  void set_last_referrer(oop referrer)        { _last_referrer = referrer; }
  jlong last_referrer_tag() const             { return _last_referrer_tag; }
  void set_last_referrer_tag(jlong value)     { _last_referrer_tag = value; }
};

// The advanced heap walk context for the FollowReferences functions.
// The context is the callbacks, and the fields used for filtering.
class AdvancedHeapWalkContext: public HeapWalkContext {
 private:
  jint _heap_filter;                            // JVMTI_HEAP_FILTER_* bit set
  KlassHandle _klass_filter;                    // optional klass filter
  const jvmtiHeapCallbacks* _heap_callbacks;    // agent callback set

 public:
  // default constructor builds an invalid context (see HeapWalkContext)
  AdvancedHeapWalkContext() : HeapWalkContext(false) { }

  AdvancedHeapWalkContext(jint heap_filter,
                          KlassHandle klass_filter,
                          const jvmtiHeapCallbacks* heap_callbacks) :
    HeapWalkContext(true),
    _heap_filter(heap_filter),
    _klass_filter(klass_filter),
    _heap_callbacks(heap_callbacks) {
  }

  // accessors
  jint heap_filter() const          { return _heap_filter; }
  KlassHandle klass_filter() const  { return _klass_filter; }

  const jvmtiHeapReferenceCallback heap_reference_callback() const {
    return _heap_callbacks->heap_reference_callback;
  };
  const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
    return _heap_callbacks->primitive_field_callback;
  }
  const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
    return _heap_callbacks->array_primitive_value_callback;
  }
  const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
    return _heap_callbacks->string_primitive_value_callback;
  }
};

// The CallbackInvoker is a class with static functions that the heap walk can call
// into to invoke callbacks. It works in one of two modes. The "basic" mode is
// used for the deprecated IterateOverReachableObjects functions. The "advanced"
// mode is for the newer FollowReferences function which supports a lot of
// additional callbacks.
class CallbackInvoker : AllStatic {
 private:
  // heap walk styles
  enum { basic, advanced };
  static int _heap_walk_type;
  static bool is_basic_heap_walk()    { return _heap_walk_type == basic; }
  static bool is_advanced_heap_walk() { return _heap_walk_type == advanced; }

  // context for basic style heap walk
  static BasicHeapWalkContext _basic_context;
  static BasicHeapWalkContext* basic_context() {
    assert(_basic_context.is_valid(), "invalid");
    return &_basic_context;
  }

  // context for advanced style heap walk
  static AdvancedHeapWalkContext _advanced_context;
  static AdvancedHeapWalkContext* advanced_context() {
    assert(_advanced_context.is_valid(), "invalid");
    return &_advanced_context;
  }

  // context needed for all heap walks
  static JvmtiTagMap* _tag_map;               // tag map of the requesting env
  static const void* _user_data;              // opaque agent data
  static GrowableArray<oop>* _visit_stack;    // objects still to be visited

  // accessors
  static JvmtiTagMap* tag_map()               { return _tag_map; }
  static const void* user_data()              { return _user_data; }
  static GrowableArray<oop>* visit_stack()    { return _visit_stack; }

  // if the object hasn't been visited then push it onto the visit stack
  // so that it will be visited later
  static inline bool check_for_visit(oop obj) {
    if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
    return true;
  }

  // invoke basic style callbacks
  static inline bool invoke_basic_heap_root_callback
    (jvmtiHeapRootKind root_kind, oop obj);
  static inline bool invoke_basic_stack_ref_callback
    (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
     int slot, oop obj);
  static inline bool invoke_basic_object_reference_callback
    (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // invoke advanced style callbacks
  static inline bool invoke_advanced_heap_root_callback
    (jvmtiHeapReferenceKind ref_kind, oop obj);
  static inline bool invoke_advanced_stack_ref_callback
    (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
     jmethodID method, jlocation bci, jint slot, oop obj);
  static inline bool invoke_advanced_object_reference_callback
    (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // used to report the value of primitive fields
  static inline bool report_primitive_field
    (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);

 public:
  // initialize for basic mode
  static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                             GrowableArray<oop>* visit_stack,
                                             const void* user_data,
                                             BasicHeapWalkContext context);

  // initialize for advanced mode
  static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                GrowableArray<oop>* visit_stack,
                                                const void* user_data,
                                                AdvancedHeapWalkContext context);

  // functions to report roots
  static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
  static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
                                           jmethodID m, oop o);
  static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
                                           jmethodID method, jlocation bci, jint slot, oop o);

  // functions to report references
  static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
  static inline bool report_class_reference(oop referrer, oop referree);
  static inline bool report_class_loader_reference(oop referrer, oop referree);
  static inline bool report_signers_reference(oop referrer, oop referree);
  static inline bool report_protection_domain_reference(oop referrer, oop referree);
  static inline bool report_superclass_reference(oop referrer, oop referree);
  static inline bool report_interface_reference(oop referrer, oop referree);
  static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
  static inline bool report_primitive_array_values(oop array);
  static inline bool report_string_value(oop str);
  static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
  static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
};

// statics
int CallbackInvoker::_heap_walk_type;
BasicHeapWalkContext CallbackInvoker::_basic_context;
AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
JvmtiTagMap* CallbackInvoker::_tag_map;
const void* CallbackInvoker::_user_data;
GrowableArray<oop>* CallbackInvoker::_visit_stack;

// initialize for basic heap walk (IterateOverReachableObjects et al)
void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                                     GrowableArray<oop>* visit_stack,
                                                     const void* user_data,
                                                     BasicHeapWalkContext context) {
  _tag_map = tag_map;
  _visit_stack = visit_stack;
  _user_data = user_data;
  _basic_context = context;
  _advanced_context.invalidate();  // will trigger assertion if used
  _heap_walk_type = basic;
}

// initialize for advanced heap walk (FollowReferences)
void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                        GrowableArray<oop>* visit_stack,
                                                        const void* user_data,
                                                        AdvancedHeapWalkContext context) {
  _tag_map = tag_map;
  _visit_stack = visit_stack;
  _user_data = user_data;
  _advanced_context = context;
  _basic_context.invalidate();  // will trigger assertion if used
  _heap_walk_type = advanced;
}


// invoke basic style heap root callback
// Invokes the old-style (IterateOverReachableObjects) heap root callback.
// Returns false if the agent requested that the iteration terminate.
inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  // if no heap root callback is provided then the root is simply
  // a candidate for visiting
  jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // CallbackWrapper caches the tag state and writes any tag update
  // back to the tag map when it goes out of scope
  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}

// invoke basic style stack ref callback
inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
                                                             jlong thread_tag,
                                                             jint depth,
                                                             jmethodID method,
                                                             jint slot,
                                                             oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  // if no stack reference callback is provided then the object is
  // simply a candidate for visiting
  jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        thread_tag,
                                        depth,
                                        method,
                                        slot,
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}

// invoke basic style object reference callback
inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
                                                                    oop referrer,
                                                                    oop referree,
                                                                    jint index) {

  assert(ServiceUtil::visible_oop(referrer), "checking");
  assert(ServiceUtil::visible_oop(referree), "checking");

  BasicHeapWalkContext* context = basic_context();

  // callback requires the referrer's tag. If it's the same referrer
  // as the last call then we use the cached value.
  jlong referrer_tag;
  if (referrer == context->last_referrer()) {
    referrer_tag = context->last_referrer_tag();
  } else {
    referrer_tag = tag_for(tag_map(), referrer);
  }

  // do the callback
  CallbackWrapper wrapper(tag_map(), referree);
  jvmtiObjectReferenceCallback cb = context->object_ref_callback();
  jvmtiIterationControl control = (*cb)(ref_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        referrer_tag,
                                        index,
                                        (void*)user_data());

  // record referrer and referrer tag. For self-references record the
  // tag value from the callback as this might differ from referrer_tag.
  context->set_last_referrer(referrer);
  if (referrer == referree) {
    context->set_last_referrer_tag(*wrapper.obj_tag_p());
  } else {
    context->set_last_referrer_tag(referrer_tag);
  }

  if (control == JVMTI_ITERATION_CONTINUE) {
    return check_for_visit(referree);
  } else {
    return control != JVMTI_ITERATION_ABORT;
  }
}

// invoke advanced style heap root callback
inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
                                                                oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback
  jint res = (*cb)(ref_kind,
                   NULL,  // referrer info
                   wrapper.klass_tag(),
                   0,     // referrer_class_tag is 0 for heap root
                   wrapper.obj_size(),
                   wrapper.obj_tag_p(),
                   NULL,  // referrer_tag_p
                   len,
                   (void*)user_data());
  if (res & JVMTI_VISIT_ABORT) {
    return false;  // agent requested that the iteration terminate
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}

// report a reference from a thread stack to an object
inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
                                                                jlong thread_tag,
                                                                jlong tid,
                                                                int depth,
                                                                jmethodID method,
                                                                jlocation bci,
                                                                jint slot,
                                                                oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // setup the referrer info
  jvmtiHeapReferenceInfo reference_info;
  reference_info.stack_local.thread_tag = thread_tag;
  reference_info.stack_local.thread_id = tid;
  reference_info.stack_local.depth = depth;
  reference_info.stack_local.method = method;
  reference_info.stack_local.location = bci;
  reference_info.stack_local.slot = slot;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // call into the agent
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  0,     // referrer_class_tag is 0 for heap root (stack)
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  NULL,  // referrer_tag is 0 for root
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}

// This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
// only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
#define REF_INFO_MASK ((1 << JVMTI_HEAP_REFERENCE_FIELD)          \
                       | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD) \
                       | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
                       | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
                       | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)  \
                       | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))

// invoke the object reference callback to report a reference
inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
                                                                       oop referrer,
                                                                       oop obj,
                                                                       jint index)
{
  // field index is only valid field in reference_info
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  assert(ServiceUtil::visible_oop(referrer), "checking");
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper (tags for both referrer and referree)
  TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // field index is only valid field in reference_info
  reference_info.field.index = index;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback
  int res = (*cb)(ref_kind,
                  (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
                  wrapper.klass_tag(),
                  wrapper.referrer_klass_tag(),
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  wrapper.referrer_tag_p(),
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}

// report a "simple root"
inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
  assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
         kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
  assert(ServiceUtil::visible_oop(obj), "checking");

  if (is_basic_heap_walk()) {
    // map to old style root kind
    jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
    return invoke_basic_heap_root_callback(root_kind, obj);
  } else {
    assert(is_advanced_heap_walk(), "wrong heap walk type");
    return invoke_advanced_heap_root_callback(kind, obj);
  }
}


// invoke the array primitive value callback for a primitive array
inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
  assert(obj->is_typeArray(), "not a primitive array");

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->array_primitive_value_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // invoke the callback
  int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
                                                  &wrapper,
                                                  obj,
                                                  (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}

// invoke the string value callback
inline bool CallbackInvoker::report_string_value(oop str) {
  assert(str->klass() == SystemDictionary::String_klass(), "not a string");

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->string_primitive_value_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(str, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), str);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // invoke the callback
  int res = invoke_string_value_callback(context->string_primitive_value_callback(),
                                         &wrapper,
                                         str,
                                         (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}

// invoke the primitive field callback
inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
                                                    oop obj,
                                                    jint index,
                                                    address addr,
                                                    char type)
{
  // for primitive fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->primitive_field_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // the field index in the referrer
  reference_info.field.index = index;

  // map the type
  jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

  // setup the jvalue
  jvalue value;
  copy_to_jvalue(&value, addr, value_type);

  jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  wrapper.obj_tag_p(),
                  value,
                  value_type,
                  (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}


// instance field
inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
                                                             jint index,
                                                             address value,
                                                             char type) {
  return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
                                obj,
                                index,
                                value,
                                type);
}

// static field
inline bool CallbackInvoker::report_primitive_static_field(oop obj,
                                                           jint index,
                                                           address value,
                                                           char type) {
  return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                                obj,
                                index,
                                value,
                                type);
}

// report a JNI local (root object) to the profiler
inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
  if (is_basic_heap_walk()) {
    return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
                                           thread_tag,
                                           depth,
                                           m,
                                           -1,
                                           obj);
  } else {
    return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
                                              thread_tag, tid,
                                              depth,
                                              m,
                                              (jlocation)-1,
                                              -1,
                                              obj);
  }
}


// report a local (stack reference, root object)
inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
                                                   jlong tid,
                                                   jint depth,
                                                   jmethodID method,
                                                   jlocation bci,
                                                   jint slot,
                                                   oop obj) {
  if (is_basic_heap_walk()) {
    return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
                                           thread_tag,
                                           depth,
                                           method,
                                           slot,
                                           obj);
  } else {
    return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
                                              thread_tag,
                                              tid,
                                              depth,
                                              method,
                                              bci,
                                              slot,
                                              obj);
  }
}

// report an object referencing a class.
inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
  }
}

// report a class referencing its class loader.
inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
  }
}

// report a class referencing its signers.
inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
  }
}

// report a class referencing its protection domain.
2487 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) { 2488 if (is_basic_heap_walk()) { 2489 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1); 2490 } else { 2491 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1); 2492 } 2493 } 2494 2495 // report a class referencing its superclass. 2496 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) { 2497 if (is_basic_heap_walk()) { 2498 // Send this to be consistent with past implementation 2499 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1); 2500 } else { 2501 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1); 2502 } 2503 } 2504 2505 // report a class referencing one of its interfaces. 2506 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) { 2507 if (is_basic_heap_walk()) { 2508 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1); 2509 } else { 2510 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1); 2511 } 2512 } 2513 2514 // report a class referencing one of its static fields. 
// report a class referencing one of its static fields.
inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
  }
}

// report an array referencing an element object
inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
  }
}

// report an object referencing an instance field object
inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
  }
}

// report a class referencing one of its constant pool entries
inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
  if (is_basic_heap_walk()) {
    return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
  } else {
    return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
  }
}

// A supporting closure used to process simple (non-stack) roots.
// The root kind must be set with set_kind() before each use; the
// iteration stops as soon as a callback returns "abort".
class SimpleRootsClosure : public OopClosure {
 private:
  jvmtiHeapReferenceKind _kind;  // kind reported for the roots being scanned
  bool _continue;                // cleared when the agent aborts the iteration

  jvmtiHeapReferenceKind root_kind() { return _kind; }

 public:
  // set the root kind for the next batch of roots and re-arm the closure
  void set_kind(jvmtiHeapReferenceKind kind) {
    _kind = kind;
    _continue = true;
  }

  // has the iteration been aborted by a callback?
  inline bool stopped() {
    return !_continue;
  }

  void do_oop(oop* obj_p) {
    // iteration has terminated
    if (stopped()) {
      return;
    }

    // ignore null or deleted handles
    oop o = *obj_p;
    if (o == NULL || o == JNIHandles::deleted_handle()) {
      return;
    }

    assert(Universe::heap()->is_in_reserved(o), "should be impossible");

    jvmtiHeapReferenceKind kind = root_kind();
    if (kind == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
      // SystemDictionary::always_strong_oops_do reports the application
      // class loader as a root. We want this root to be reported as
      // a root kind of "OTHER" rather than "SYSTEM_CLASS".
      if (!o->is_instanceMirror()) {
        kind = JVMTI_HEAP_REFERENCE_OTHER;
      }
    }

    // some objects are ignored - in the case of simple
    // roots it's mostly Symbol*s that we are skipping
    // here.
    if (!ServiceUtil::visible_oop(o)) {
      return;
    }

    // invoke the callback
    _continue = CallbackInvoker::report_simple_root(kind, o);

  }
  virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
};

// A supporting closure used to process JNI locals.
// The thread context must be set with set_context() before walking a
// thread's JNI handle block.
class JNILocalRootsClosure : public OopClosure {
 private:
  jlong _thread_tag;   // tag of the thread owning the locals
  jlong _tid;          // thread id
  jint _depth;         // frame depth the locals belong to
  jmethodID _method;   // method executing in that frame
  bool _continue;      // cleared when the agent aborts the iteration
 public:
  void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
    _thread_tag = thread_tag;
    _tid = tid;
    _depth = depth;
    _method = method;
    _continue = true;
  }

  // has the iteration been aborted by a callback?
  inline bool stopped() {
    return !_continue;
  }

  void do_oop(oop* obj_p) {
    // iteration has terminated
    if (stopped()) {
      return;
    }

    // ignore null or deleted handles
    oop o = *obj_p;
    if (o == NULL || o == JNIHandles::deleted_handle()) {
      return;
    }

    // ignore any objects that aren't visible to the profiler
    if (!ServiceUtil::visible_oop(o)) {
      return;
    }

    // invoke the callback
    _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
  }
  virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
};


// A VM operation to iterate over objects that are reachable from
// a set of roots or an initial object.
2652 // 2653 // For VM_HeapWalkOperation the set of roots used is :- 2654 // 2655 // - All JNI global references 2656 // - All inflated monitors 2657 // - All classes loaded by the boot class loader (or all classes 2658 // in the event that class unloading is disabled) 2659 // - All java threads 2660 // - For each java thread then all locals and JNI local references 2661 // on the thread's execution stack 2662 // - All visible/explainable objects from Universes::oops_do 2663 // 2664 class VM_HeapWalkOperation: public VM_Operation { 2665 private: 2666 enum { 2667 initial_visit_stack_size = 4000 2668 }; 2669 2670 bool _is_advanced_heap_walk; // indicates FollowReferences 2671 JvmtiTagMap* _tag_map; 2672 Handle _initial_object; 2673 GrowableArray<oop>* _visit_stack; // the visit stack 2674 2675 bool _collecting_heap_roots; // are we collecting roots 2676 bool _following_object_refs; // are we following object references 2677 2678 bool _reporting_primitive_fields; // optional reporting 2679 bool _reporting_primitive_array_values; 2680 bool _reporting_string_values; 2681 2682 GrowableArray<oop>* create_visit_stack() { 2683 return new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(initial_visit_stack_size, true); 2684 } 2685 2686 // accessors 2687 bool is_advanced_heap_walk() const { return _is_advanced_heap_walk; } 2688 JvmtiTagMap* tag_map() const { return _tag_map; } 2689 Handle initial_object() const { return _initial_object; } 2690 2691 bool is_following_references() const { return _following_object_refs; } 2692 2693 bool is_reporting_primitive_fields() const { return _reporting_primitive_fields; } 2694 bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; } 2695 bool is_reporting_string_values() const { return _reporting_string_values; } 2696 2697 GrowableArray<oop>* visit_stack() const { return _visit_stack; } 2698 2699 // iterate over the various object types 2700 inline bool iterate_over_array(oop o); 2701 inline bool 
iterate_over_type_array(oop o); 2702 inline bool iterate_over_class(oop o); 2703 inline bool iterate_over_object(oop o); 2704 2705 // root collection 2706 inline bool collect_simple_roots(); 2707 inline bool collect_stack_roots(); 2708 inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk); 2709 2710 // visit an object 2711 inline bool visit(oop o); 2712 2713 public: 2714 VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2715 Handle initial_object, 2716 BasicHeapWalkContext callbacks, 2717 const void* user_data); 2718 2719 VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2720 Handle initial_object, 2721 AdvancedHeapWalkContext callbacks, 2722 const void* user_data); 2723 2724 ~VM_HeapWalkOperation(); 2725 2726 VMOp_Type type() const { return VMOp_HeapWalkOperation; } 2727 void doit(); 2728 }; 2729 2730 2731 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2732 Handle initial_object, 2733 BasicHeapWalkContext callbacks, 2734 const void* user_data) { 2735 _is_advanced_heap_walk = false; 2736 _tag_map = tag_map; 2737 _initial_object = initial_object; 2738 _following_object_refs = (callbacks.object_ref_callback() != NULL); 2739 _reporting_primitive_fields = false; 2740 _reporting_primitive_array_values = false; 2741 _reporting_string_values = false; 2742 _visit_stack = create_visit_stack(); 2743 2744 2745 CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks); 2746 } 2747 2748 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2749 Handle initial_object, 2750 AdvancedHeapWalkContext callbacks, 2751 const void* user_data) { 2752 _is_advanced_heap_walk = true; 2753 _tag_map = tag_map; 2754 _initial_object = initial_object; 2755 _following_object_refs = true; 2756 _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);; 2757 _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);; 2758 _reporting_string_values = 
(callbacks.string_primitive_value_callback() != NULL);; 2759 _visit_stack = create_visit_stack(); 2760 2761 CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks); 2762 } 2763 2764 VM_HeapWalkOperation::~VM_HeapWalkOperation() { 2765 if (_following_object_refs) { 2766 assert(_visit_stack != NULL, "checking"); 2767 delete _visit_stack; 2768 _visit_stack = NULL; 2769 } 2770 } 2771 2772 // an array references its class and has a reference to 2773 // each element in the array 2774 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) { 2775 objArrayOop array = objArrayOop(o); 2776 2777 // array reference to its class 2778 oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror(); 2779 if (!CallbackInvoker::report_class_reference(o, mirror)) { 2780 return false; 2781 } 2782 2783 // iterate over the array and report each reference to a 2784 // non-null element 2785 for (int index=0; index<array->length(); index++) { 2786 oop elem = array->obj_at(index); 2787 if (elem == NULL) { 2788 continue; 2789 } 2790 2791 // report the array reference o[index] = elem 2792 if (!CallbackInvoker::report_array_element_reference(o, elem, index)) { 2793 return false; 2794 } 2795 } 2796 return true; 2797 } 2798 2799 // a type array references its class 2800 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) { 2801 Klass* k = o->klass(); 2802 oop mirror = k->java_mirror(); 2803 if (!CallbackInvoker::report_class_reference(o, mirror)) { 2804 return false; 2805 } 2806 2807 // report the array contents if required 2808 if (is_reporting_primitive_array_values()) { 2809 if (!CallbackInvoker::report_primitive_array_values(o)) { 2810 return false; 2811 } 2812 } 2813 return true; 2814 } 2815 2816 #ifdef ASSERT 2817 // verify that a static oop field is in range 2818 static inline bool verify_static_oop(InstanceKlass* ik, 2819 oop mirror, int offset) { 2820 address obj_p = (address)mirror + offset; 2821 address start = 
// --- tail of the ASSERT-only helper VM_HeapWalkOperation::verify_static_oop() ---
// (the declaration and the start of the assignment to 'start' precede this
//  chunk; 'start' points at the first static oop field of the java mirror)
           (address)InstanceMirrorKlass::start_of_static_fields(mirror);
  // one-past-the-end of the mirror's static oop field block
  address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
  assert(end >= start, "sanity check");

  // the candidate field address must lie inside [start, end)
  if (obj_p >= start && obj_p < end) {
    return true;
  } else {
    return false;
  }
}
#endif // #ifdef ASSERT

// a class references its super class, interfaces, class loader, ...
// and finally its static fields
//
// Reports every outbound reference from a class (i.e. from its java.lang.Class
// mirror): superclass mirror, class loader, protection domain, signers,
// resolved String/Klass constant pool entries, interface mirrors, and finally
// the static fields (oop fields as references; primitive statics via the
// primitive-field callback when enabled).  Returns false as soon as any
// callback asks to terminate the iteration, true otherwise.
inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
  int i;   // index for the interface and static-field loops below
  Klass* klass = java_lang_Class::as_Klass(java_class);

  if (klass->oop_is_instance()) {
    InstanceKlass* ik = InstanceKlass::cast(klass);

    // Skip classes that are not linked yet -- there is nothing resolved to
    // report.  (Note: the check is is_linked(), not is_initialized().)
    if (!ik->is_linked()) {
      return true;
    }

    // get the java mirror -- the referrer for all reports below
    oop mirror = klass->java_mirror();

    // super (only if something more interesting than java.lang.Object)
    Klass* java_super = ik->java_super();
    if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
      oop super = java_super->java_mirror();
      if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
        return false;
      }
    }

    // class loader
    oop cl = ik->class_loader();
    if (cl != NULL) {
      if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
        return false;
      }
    }

    // protection domain
    oop pd = ik->protection_domain();
    if (pd != NULL) {
      if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
        return false;
      }
    }

    // signers
    oop signers = ik->signers();
    if (signers != NULL) {
      if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
        return false;
      }
    }

    // references from the constant pool
    {
      ConstantPool* pool = ik->constants();
      // index 0 of a constant pool is unused; start at 1.  NB: this inner
      // 'i' shadows the function-scope 'i' declared above.
      for (int i = 1; i < pool->length(); i++) {
        constantTag tag = pool->tag_at(i).value();
        if (tag.is_string() || tag.is_klass()) {
          oop entry;
          if (tag.is_string()) {
            entry = pool->resolved_string_at(i);
            // If the entry is non-null it is resolved.
            if (entry == NULL) continue;
          } else {
            entry = pool->resolved_klass_at(i)->java_mirror();
          }
          if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
            return false;
          }
        }
      }
    }

    // interfaces
    // (These will already have been reported as references from the constant pool
    //  but are specified by IterateOverReachableObjects and must be reported).
    Array<Klass*>* interfaces = ik->local_interfaces();
    for (i = 0; i < interfaces->length(); i++) {
      oop interf = ((Klass*)interfaces->at(i))->java_mirror();
      if (interf == NULL) {
        continue;
      }
      if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
        return false;
      }
    }

    // iterate over the static fields
    // (field_map is heap allocated -- it must be freed on every exit path below)
    ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
    for (i=0; i<field_map->field_count(); i++) {
      ClassFieldDescriptor* field = field_map->field_at(i);
      char type = field->field_type();
      if (!is_primitive_field_type(type)) {
        oop fld_o = mirror->obj_field(field->field_offset());
        assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
        if (fld_o != NULL) {
          int slot = field->field_index();
          if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
            delete field_map;
            return false;
          }
        }
      } else {
        if (is_reporting_primitive_fields()) {
          // primitive static field -- report the raw field address
          address addr = (address)mirror + field->field_offset();
          int slot = field->field_index();
          if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
            delete field_map;
            return false;
          }
        }
      }
    }
    delete field_map;

    return true;
  }

  // not an InstanceKlass (e.g. the mirror of an array class) -- nothing to report
  return true;
}

// an object references a class and its instance fields
// (static fields are ignored here as we report these as
// references from the class).
//
// Returns false if a callback asked to terminate the iteration.
inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
  // reference to the class
  if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
    return false;
  }

  // iterate over instance fields (map is cached per class, so not freed here)
  ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      oop fld_o = o->obj_field(field->field_offset());
      // ignore any objects that aren't visible to profiler
      if (fld_o != NULL && ServiceUtil::visible_oop(fld_o)) {
        assert(Universe::heap()->is_in_reserved(fld_o), "unsafe code should not "
               "have references to Klass* anymore");
        int slot = field->field_index();
        if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
          return false;
        }
      }
    } else {
      if (is_reporting_primitive_fields()) {
        // primitive instance field
        address addr = (address)o + field->field_offset();
        int slot = field->field_index();
        if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
          return false;
        }
      }
    }
  }

  // if the object is a java.lang.String, optionally report its value
  if (is_reporting_string_values() &&
      o->klass() == SystemDictionary::String_klass()) {
    if (!CallbackInvoker::report_string_value(o)) {
      return false;
    }
  }
  return true;
}


// Collects all simple (non-stack) roots except for threads;
// threads are handled in collect_stack_roots() as an optimization.
// if there's a heap root callback provided then the callback is
// invoked for each simple root.
// if an object reference callback is provided then all simple
// roots are pushed onto the marking stack so that they can be
// processed later
//
// Returns false if a callback asked to terminate the iteration.
inline bool VM_HeapWalkOperation::collect_simple_roots() {
  SimpleRootsClosure blk;

  // JNI globals
  blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
  JNIHandles::oops_do(&blk);
  if (blk.stopped()) {
    return false;
  }

  // Preloaded classes and loader from the system dictionary
  blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
  SystemDictionary::always_strong_oops_do(&blk);
  KlassToOopClosure klass_blk(&blk);
  ClassLoaderDataGraph::always_strong_oops_do(&blk, &klass_blk, false);
  if (blk.stopped()) {
    return false;
  }

  // Inflated monitors
  blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
  ObjectSynchronizer::oops_do(&blk);
  if (blk.stopped()) {
    return false;
  }

  // threads are now handled in collect_stack_roots()

  // Other kinds of roots maintained by HotSpot
  // Many of these won't be visible but others (such as instances of important
  // exceptions) will be visible.
  blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
  Universe::oops_do(&blk);

  // If there are any non-perm roots in the code cache, visit them.
  // NOTE(review): unlike the earlier root groups there is no blk.stopped()
  // check after these last two passes -- presumably the closure simply stops
  // reporting once stopped; confirm this is intentional.
  blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
  CodeBlobToOopClosure look_in_blobs(&blk, !CodeBlobToOopClosure::FixRelocations);
  CodeCache::scavenge_root_nmethods_do(&look_in_blobs);

  return true;
}

// Walk the stack of a given thread and find all references (locals
// and JNI calls) and report these as stack references
//
// For each java frame, T_OBJECT locals and operand-stack slots are reported
// via report_stack_ref_root (operand-stack entries use slot numbers offset by
// locals->size()).  For native methods the frame's JNI locals are reported
// instead: the top frame uses the thread's active handle block, deeper native
// frames use the handle block of the most recent entry frame.  Returns false
// if a callback asked to terminate the iteration.
inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
                                                      JNILocalRootsClosure* blk)
{
  oop threadObj = java_thread->threadObj();
  assert(threadObj != NULL, "sanity check");

  // only need to get the thread's tag once per thread
  jlong thread_tag = tag_for(_tag_map, threadObj);

  // also need the thread id
  jlong tid = java_lang_Thread::thread_id(threadObj);


  if (java_thread->has_last_Java_frame()) {

    // vframes are resource allocated
    Thread* current_thread = Thread::current();
    ResourceMark rm(current_thread);
    HandleMark hm(current_thread);

    RegisterMap reg_map(java_thread);
    frame f = java_thread->last_frame();
    vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);

    bool is_top_frame = true;
    int depth = 0;                      // counts java frames only (see below)
    frame* last_entry_frame = NULL;     // pending entry frame whose JNI locals
                                        // belong to the next native java frame

    while (vf != NULL) {
      if (vf->is_java_frame()) {

        // java frame (interpreted, compiled, ...)
        javaVFrame *jvf = javaVFrame::cast(vf);

        // the jmethodID
        jmethodID method = jvf->method()->jmethod_id();

        if (!(jvf->method()->is_native())) {
          jlocation bci = (jlocation)jvf->bci();
          StackValueCollection* locals = jvf->locals();
          for (int slot=0; slot<locals->size(); slot++) {
            if (locals->at(slot)->type() == T_OBJECT) {
              oop o = locals->obj_at(slot)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                   bci, slot, o)) {
                return false;
              }
            }
          }

          // operand (expression) stack entries are reported with slot numbers
          // following the locals
          StackValueCollection* exprs = jvf->expressions();
          for (int index=0; index < exprs->size(); index++) {
            if (exprs->at(index)->type() == T_OBJECT) {
              oop o = exprs->obj_at(index)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                   bci, locals->size() + index, o)) {
                return false;
              }
            }
          }

        } else {
          // native frame -- report its JNI locals instead of interpreter state
          blk->set_context(thread_tag, tid, depth, method);
          if (is_top_frame) {
            // JNI locals for the top frame.
            java_thread->active_handles()->oops_do(blk);
          } else {
            if (last_entry_frame != NULL) {
              // JNI locals for the entry frame
              assert(last_entry_frame->is_entry_frame(), "checking");
              last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
            }
          }
        }
        last_entry_frame = NULL;
        depth++;
      } else {
        // externalVFrame - for an entry frame then we report the JNI locals
        // when we find the corresponding javaVFrame
        frame* fr = vf->frame_pointer();
        assert(fr != NULL, "sanity check");
        if (fr->is_entry_frame()) {
          last_entry_frame = fr;
        }
      }

      vf = vf->sender();
      is_top_frame = false;
    }
  } else {
    // no last java frame but there may be JNI locals
    blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
    java_thread->active_handles()->oops_do(blk);
  }
  return true;
}


// Collects the simple roots for all threads and collects all
// stack roots - for each thread it walks the execution
// stack to find all references and local JNI refs.
//
// Exiting and hidden threads, and threads without a java.lang.Thread object,
// are skipped.  Returns false if a callback asked to terminate the iteration.
inline bool VM_HeapWalkOperation::collect_stack_roots() {
  JNILocalRootsClosure blk;
  for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
    oop threadObj = thread->threadObj();
    if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
      // Collect the simple root for this thread before we
      // collect its stack roots
      if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
                                               threadObj)) {
        return false;
      }
      if (!collect_stack_roots(thread, &blk)) {
        return false;
      }
    }
  }
  return true;
}

// visit an object
// first mark the object as visited
// second get all the outbound references from this object (in other words, all
// the objects referenced by this object).
//
// Dispatches on the object's type: non-primitive class mirrors go through
// iterate_over_class(), ordinary instances through iterate_over_object(),
// and arrays through iterate_over_array()/iterate_over_type_array().
// Returns false if a callback asked to terminate the iteration.
bool VM_HeapWalkOperation::visit(oop o) {
  // mark object as visited
  assert(!ObjectMarker::visited(o), "can't visit same object more than once");
  ObjectMarker::mark(o);

  // instance
  if (o->is_instance()) {
    if (o->klass() == SystemDictionary::Class_klass()) {
      if (!java_lang_Class::is_primitive(o)) {
        // a java.lang.Class
        return iterate_over_class(o);
      }
    } else {
      return iterate_over_object(o);
    }
  }

  // object array
  if (o->is_objArray()) {
    return iterate_over_array(o);
  }

  // type array
  if (o->is_typeArray()) {
    return iterate_over_type_array(o);
  }

  return true;
}

// VM operation entry point: performs the heap walk (executed via
// VMThread::execute() by the JvmtiTagMap entry points below).
void VM_HeapWalkOperation::doit() {
  ResourceMark rm;
  ObjectMarkerController marker;
  ClassFieldMapCacheMark cm;

  assert(visit_stack()->is_empty(), "visit stack must be empty");

  // the heap walk starts with an initial object or the heap roots
  if (initial_object().is_null()) {
    // If either collect_stack_roots() or collect_simple_roots()
    // returns false at this point, then there are no mark bits
    // to reset.
    ObjectMarker::set_needs_reset(false);

    // Calling collect_stack_roots() before collect_simple_roots()
    // can result in a big performance boost for an agent that is
    // focused on analyzing references in the thread stacks.
    if (!collect_stack_roots()) return;

    if (!collect_simple_roots()) return;

    // no early return so enable heap traversal to reset the mark bits
    ObjectMarker::set_needs_reset(true);
  } else {
    visit_stack()->push(initial_object()());
  }

  // object references required
  if (is_following_references()) {

    // visit each object until all reachable objects have been
    // visited or the callback asked to terminate the iteration.
    while (!visit_stack()->is_empty()) {
      oop o = visit_stack()->pop();
      if (!ObjectMarker::visited(o)) {
        if (!visit(o)) {
          break;
        }
      }
    }
  }
}

// iterate over all objects that are reachable from a set of roots
// (JVMTI IterateOverReachableObjects).  Heap_lock is held while the
// VM operation runs.
void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
                                                 jvmtiStackReferenceCallback stack_ref_callback,
                                                 jvmtiObjectReferenceCallback object_ref_callback,
                                                 const void* user_data) {
  MutexLocker ml(Heap_lock);
  BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
  VM_HeapWalkOperation op(this, Handle(), context, user_data);
  VMThread::execute(&op);
}

// iterate over all objects that are reachable from a given object
// (JVMTI IterateOverObjectsReachableFromObject)
void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
                                                             jvmtiObjectReferenceCallback object_ref_callback,
                                                             const void* user_data) {
  oop obj = JNIHandles::resolve(object);
  Handle initial_object(Thread::current(), obj);

  MutexLocker ml(Heap_lock);
  BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
  VM_HeapWalkOperation op(this, initial_object, context, user_data);
  VMThread::execute(&op);
}

// follow references from an initial object or the GC roots
// (JVMTI FollowReferences; 'object' may be NULL to start from the roots)
void JvmtiTagMap::follow_references(jint heap_filter,
                                    KlassHandle klass,
                                    jobject object,
                                    const jvmtiHeapCallbacks* callbacks,
                                    const void* user_data)
{
  oop obj = JNIHandles::resolve(object);
  Handle initial_object(Thread::current(), obj);

  MutexLocker ml(Heap_lock);
  AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
  VM_HeapWalkOperation op(this, initial_object, context, user_data);
  VMThread::execute(&op);
}


// GC support: for every environment with a non-empty tag map, update entries
// whose referents moved and purge entries whose referents died (see
// JvmtiTagMap::do_weak_oops below).
void JvmtiTagMap::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  // No locks during VM bring-up (0 threads) and no safepoints
after main 3304 // thread creation and before VMThread creation (1 thread); initial GC 3305 // verification can happen in that window which gets to here. 3306 assert(Threads::number_of_threads() <= 1 || 3307 SafepointSynchronize::is_at_safepoint(), 3308 "must be executed at a safepoint"); 3309 if (JvmtiEnv::environments_might_exist()) { 3310 JvmtiEnvIterator it; 3311 for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) { 3312 JvmtiTagMap* tag_map = env->tag_map(); 3313 if (tag_map != NULL && !tag_map->is_empty()) { 3314 tag_map->do_weak_oops(is_alive, f); 3315 } 3316 } 3317 } 3318 } 3319 3320 void JvmtiTagMap::do_weak_oops(BoolObjectClosure* is_alive, OopClosure* f) { 3321 3322 // does this environment have the OBJECT_FREE event enabled 3323 bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE); 3324 3325 // counters used for trace message 3326 int freed = 0; 3327 int moved = 0; 3328 3329 JvmtiTagHashmap* hashmap = this->hashmap(); 3330 3331 // reenable sizing (if disabled) 3332 hashmap->set_resizing_enabled(true); 3333 3334 // if the hashmap is empty then we can skip it 3335 if (hashmap->_entry_count == 0) { 3336 return; 3337 } 3338 3339 // now iterate through each entry in the table 3340 3341 JvmtiTagHashmapEntry** table = hashmap->table(); 3342 int size = hashmap->size(); 3343 3344 JvmtiTagHashmapEntry* delayed_add = NULL; 3345 3346 for (int pos = 0; pos < size; ++pos) { 3347 JvmtiTagHashmapEntry* entry = table[pos]; 3348 JvmtiTagHashmapEntry* prev = NULL; 3349 3350 while (entry != NULL) { 3351 JvmtiTagHashmapEntry* next = entry->next(); 3352 3353 oop* obj = entry->object_addr(); 3354 3355 // has object been GC'ed 3356 if (!is_alive->do_object_b(entry->object())) { 3357 // grab the tag 3358 jlong tag = entry->tag(); 3359 guarantee(tag != 0, "checking"); 3360 3361 // remove GC'ed entry from hashmap and return the 3362 // entry to the free list 3363 hashmap->remove(prev, pos, entry); 3364 destroy_entry(entry); 3365 3366 // post the 
event to the profiler 3367 if (post_object_free) { 3368 JvmtiExport::post_object_free(env(), tag); 3369 } 3370 3371 ++freed; 3372 } else { 3373 f->do_oop(entry->object_addr()); 3374 oop new_oop = entry->object(); 3375 3376 // if the object has moved then re-hash it and move its 3377 // entry to its new location. 3378 unsigned int new_pos = JvmtiTagHashmap::hash(new_oop, size); 3379 if (new_pos != (unsigned int)pos) { 3380 if (prev == NULL) { 3381 table[pos] = next; 3382 } else { 3383 prev->set_next(next); 3384 } 3385 if (new_pos < (unsigned int)pos) { 3386 entry->set_next(table[new_pos]); 3387 table[new_pos] = entry; 3388 } else { 3389 // Delay adding this entry to it's new position as we'd end up 3390 // hitting it again during this iteration. 3391 entry->set_next(delayed_add); 3392 delayed_add = entry; 3393 } 3394 moved++; 3395 } else { 3396 // object didn't move 3397 prev = entry; 3398 } 3399 } 3400 3401 entry = next; 3402 } 3403 } 3404 3405 // Re-add all the entries which were kept aside 3406 while (delayed_add != NULL) { 3407 JvmtiTagHashmapEntry* next = delayed_add->next(); 3408 unsigned int pos = JvmtiTagHashmap::hash(delayed_add->object(), size); 3409 delayed_add->set_next(table[pos]); 3410 table[pos] = delayed_add; 3411 delayed_add = next; 3412 } 3413 3414 // stats 3415 if (TraceJVMTIObjectTagging) { 3416 int post_total = hashmap->_entry_count; 3417 int pre_total = post_total + freed; 3418 3419 tty->print_cr("(%d->%d, %d freed, %d total moves)", 3420 pre_total, post_total, freed, moved); 3421 } 3422 }