1 /*
   2  * Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/javaClasses.inline.hpp"
  27 #include "classfile/symbolTable.hpp"
  28 #include "classfile/systemDictionary.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "code/codeCache.hpp"
  31 #include "jvmtifiles/jvmtiEnv.hpp"
  32 #include "memory/resourceArea.hpp"
  33 #include "oops/access.inline.hpp"
  34 #include "oops/arrayOop.inline.hpp"
  35 #include "oops/instanceMirrorKlass.hpp"
  36 #include "oops/objArrayKlass.hpp"
  37 #include "oops/objArrayOop.inline.hpp"
  38 #include "oops/oop.inline.hpp"
  39 #include "oops/typeArrayOop.inline.hpp"
  40 #include "prims/jvmtiEventController.hpp"
  41 #include "prims/jvmtiEventController.inline.hpp"
  42 #include "prims/jvmtiExport.hpp"
  43 #include "prims/jvmtiImpl.hpp"
  44 #include "prims/jvmtiTagMap.hpp"
  45 #include "runtime/biasedLocking.hpp"
  46 #include "runtime/javaCalls.hpp"
  47 #include "runtime/jniHandles.inline.hpp"
  48 #include "runtime/mutex.hpp"
  49 #include "runtime/mutexLocker.hpp"
  50 #include "runtime/reflectionUtils.hpp"
  51 #include "runtime/thread.inline.hpp"
  52 #include "runtime/threadSMR.hpp"
  53 #include "runtime/vframe.hpp"
  54 #include "runtime/vmThread.hpp"
  55 #include "runtime/vm_operations.hpp"
  56 #include "services/serviceUtil.hpp"
  57 #include "utilities/macros.hpp"
  58 
  59 // JvmtiTagHashmapEntry
  60 //
  61 // Each entry encapsulates a reference to the tagged object
  62 // and the tag value. In addition an entry includes a next pointer which
  63 // is used to chain entries together.
  64 
class JvmtiTagHashmapEntry : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;

  oop _object;                          // tagged object
  jlong _tag;                           // the tag
  JvmtiTagHashmapEntry* _next;          // next on the list

  // (Re-)initialize this entry. Also used when recycling an entry from
  // the tag map's free list (see JvmtiTagMap::create_entry).
  inline void init(oop object, jlong tag) {
    _object = object;
    _tag = tag;
    _next = NULL;
  }

  // constructor
  JvmtiTagHashmapEntry(oop object, jlong tag) { init(object, tag); }

 public:

  // accessor methods
  inline oop* object_addr() { return &_object; }
  // Load the object through the phantom-strength access barrier, which
  // keeps the referent alive (contrast with object_peek below).
  inline oop object()       { return RootAccess<ON_PHANTOM_OOP_REF>::oop_load(object_addr()); }
  // Peek at the object without keeping it alive. The returned object must be
  // kept alive using a normal access if it leaks out of a thread transition from VM.
  inline oop object_peek()  {
    return RootAccess<ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE>::oop_load(object_addr());
  }
  inline jlong tag() const  { return _tag; }

  // set the tag; a tag of 0 means "untagged" and is represented by removing
  // the entry instead (see CallbackWrapper::post_callback_tag_update)
  inline void set_tag(jlong tag) {
    assert(tag != 0, "can't be zero");
    _tag = tag;
  }

  // compare against a key oop without keeping the referent alive
  inline bool equals(oop object) {
    return object == object_peek();
  }

  inline JvmtiTagHashmapEntry* next() const        { return _next; }
  inline void set_next(JvmtiTagHashmapEntry* next) { _next = next; }
};
 106 
 107 
 108 // JvmtiTagHashmap
 109 //
 110 // A hashmap is essentially a table of pointers to entries. Entries
 111 // are hashed to a location, or position in the table, and then
 112 // chained from that location. The "key" for hashing is address of
 113 // the object, or oop. The "value" is the tag value.
 114 //
 115 // A hashmap maintains a count of the number entries in the hashmap
 116 // and resizes if the number of entries exceeds a given threshold.
 117 // The threshold is specified as a percentage of the size - for
 118 // example a threshold of 0.75 will trigger the hashmap to resize
 119 // if the number of entries is >75% of table size.
 120 //
 121 // A hashmap provides functions for adding, removing, and finding
 122 // entries. It also provides a function to iterate over all entries
 123 // in the hashmap.
 124 
class JvmtiTagHashmap : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;

  // entry-count thresholds at which a memory-usage trace message is
  // emitted (see compute_next_trace_threshold)
  enum {
    small_trace_threshold  = 10000,                  // threshold for tracing
    medium_trace_threshold = 100000,
    large_trace_threshold  = 1000000,
    initial_trace_threshold = small_trace_threshold
  };

  static int _sizes[];                  // array of possible hashmap sizes
  int _size;                            // actual size of the table
  int _size_index;                      // index into size table

  int _entry_count;                     // number of entries in the hashmap

  float _load_factor;                   // load factor as a % of the size
  int _resize_threshold;                // computed threshold to trigger resizing.
  bool _resizing_enabled;               // indicates if hashmap can resize

  int _trace_threshold;                 // threshold for trace messages

  JvmtiTagHashmapEntry** _table;        // the table of entries.

  // private accessors
  int resize_threshold() const                  { return _resize_threshold; }
  int trace_threshold() const                   { return _trace_threshold; }

  // initialize the hashmap: pick the table size from the _sizes table,
  // compute the resize threshold (load_factor * size), and allocate a
  // zeroed bucket array. Allocation failure here is fatal because the
  // tag map cannot operate without a table.
  void init(int size_index=0, float load_factor=4.0f) {
    int initial_size =  _sizes[size_index];
    _size_index = size_index;
    _size = initial_size;
    _entry_count = 0;
    _trace_threshold = initial_trace_threshold;
    _load_factor = load_factor;
    _resize_threshold = (int)(_load_factor * _size);
    _resizing_enabled = true;
    size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
    _table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (_table == NULL) {
      vm_exit_out_of_memory(s, OOM_MALLOC_ERROR,
        "unable to allocate initial hashtable for jvmti object tags");
    }
    for (int i=0; i<initial_size; i++) {
      _table[i] = NULL;
    }
  }

  // hash a given key (oop) with the specified size
  static unsigned int hash(oop key, int size) {
    // shift right to get better distribution (as these bits will be zero
    // with aligned addresses)
    unsigned int addr = (unsigned int)(cast_from_oop<intptr_t>(key));
#ifdef _LP64
    return (addr >> 3) % size;
#else
    return (addr >> 2) % size;
#endif
  }

  // hash a given key (oop)
  unsigned int hash(oop key) {
    return hash(key, _size);
  }

  // resize the hashmap - allocates a large table and re-hashes
  // all entries into the new table.
  void resize() {
    int new_size_index = _size_index+1;
    int new_size = _sizes[new_size_index];
    if (new_size < 0) {
      // hashmap already at maximum capacity (-1 terminates _sizes)
      return;
    }

    // allocate new table; failure here is non-fatal - resizing is simply
    // disabled and the map keeps working with longer bucket chains
    size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
    JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (new_table == NULL) {
      warning("unable to allocate larger hashtable for jvmti object tags");
      set_resizing_enabled(false);
      return;
    }

    // initialize new table
    int i;
    for (i=0; i<new_size; i++) {
      new_table[i] = NULL;
    }

    // rehash all entries into the new table; entries are pushed at the
    // head of their new bucket chain
    for (i=0; i<_size; i++) {
      JvmtiTagHashmapEntry* entry = _table[i];
      while (entry != NULL) {
        JvmtiTagHashmapEntry* next = entry->next();
        oop key = entry->object_peek();
        assert(key != NULL, "jni weak reference cleared!!");
        unsigned int h = hash(key, new_size);
        JvmtiTagHashmapEntry* anchor = new_table[h];
        if (anchor == NULL) {
          new_table[h] = entry;
          entry->set_next(NULL);
        } else {
          entry->set_next(anchor);
          new_table[h] = entry;
        }
        entry = next;
      }
    }

    // free old table and update settings.
    os::free((void*)_table);
    _table = new_table;
    _size_index = new_size_index;
    _size = new_size;

    // compute new resize threshold
    _resize_threshold = (int)(_load_factor * _size);
  }


  // internal remove function - remove an entry at a given position in the
  // table. prev is the entry's predecessor in the bucket chain, or NULL
  // if the entry is at the head of its chain.
  inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
    assert(pos >= 0 && pos < _size, "out of range");
    if (prev == NULL) {
      _table[pos] = entry->next();
    } else {
      prev->set_next(entry->next());
    }
    assert(_entry_count > 0, "checking");
    _entry_count--;
  }

  // resizing switch
  bool is_resizing_enabled() const          { return _resizing_enabled; }
  void set_resizing_enabled(bool enable)    { _resizing_enabled = enable; }

  // debugging
  void print_memory_usage();
  void compute_next_trace_threshold();

 public:

  // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
  // The preferred size is rounded down to an actual size.
  JvmtiTagHashmap(int size, float load_factor=0.0f) {
    // find the first size in _sizes at least as large as the preferred
    // size; if the preferred size exceeds the largest table size, fall
    // back to the largest (the -1 terminator is never selected)
    int i=0;
    while (_sizes[i] < size) {
      if (_sizes[i] < 0) {
        assert(i > 0, "sanity check");
        i--;
        break;
      }
      i++;
    }

    // if a load factor is specified then use it, otherwise use default
    if (load_factor > 0.01f) {
      init(i, load_factor);
    } else {
      init(i);
    }
  }

  // create a JvmtiTagHashmap with default settings
  JvmtiTagHashmap() {
    init();
  }

  // release table when JvmtiTagHashmap destroyed
  ~JvmtiTagHashmap() {
    if (_table != NULL) {
      os::free((void*)_table);
      _table = NULL;
    }
  }

  // accessors
  int size() const                              { return _size; }
  JvmtiTagHashmapEntry** table() const          { return _table; }
  int entry_count() const                       { return _entry_count; }

  // find an entry in the hashmap, returns NULL if not found.
  inline JvmtiTagHashmapEntry* find(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    while (entry != NULL) {
      if (entry->equals(key)) {
         return entry;
      }
      entry = entry->next();
    }
    return NULL;
  }


  // add a new entry to hashmap; the key must not already be present
  inline void add(oop key, JvmtiTagHashmapEntry* entry) {
    assert(key != NULL, "checking");
    assert(find(key) == NULL, "duplicate detected");
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* anchor = _table[h];
    // link the new entry at the head of the bucket chain
    if (anchor == NULL) {
      _table[h] = entry;
      entry->set_next(NULL);
    } else {
      entry->set_next(anchor);
      _table[h] = entry;
    }

    _entry_count++;
    if (log_is_enabled(Debug, jvmti, objecttagging) && entry_count() >= trace_threshold()) {
      print_memory_usage();
      compute_next_trace_threshold();
    }

    // if the number of entries exceed the threshold then resize
    if (entry_count() > resize_threshold() && is_resizing_enabled()) {
      resize();
    }
  }

  // remove an entry with the given key; returns the unlinked entry or
  // NULL if the key was not present (caller owns the returned entry)
  inline JvmtiTagHashmapEntry* remove(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    JvmtiTagHashmapEntry* prev = NULL;
    while (entry != NULL) {
      if (entry->equals(key)) {
        break;
      }
      prev = entry;
      entry = entry->next();
    }
    if (entry != NULL) {
      remove(prev, h, entry);
    }
    return entry;
  }

  // iterate over all entries in the hashmap
  void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
};
 371 
// Possible hashmap sizes - odd primes that roughly double in size.
// To avoid excessive resizing the odd primes between 4801 and 76831 and
// between 76831 and 307261 have been removed. The list must be terminated
// by -1, which acts as the "maximum capacity reached" sentinel in resize().
int JvmtiTagHashmap::_sizes[] =  { 4801, 76831, 307261, 614563, 1228891,
    2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 };
 377 
 378 
// A supporting class for iterating over all entries in Hashmap
class JvmtiTagHashmapEntryClosure {
 public:
  // invoked once per entry; an implementation may remove the current
  // entry (entry_iterate reads the next pointer before the call)
  virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0;
};
 384 
 385 
 386 // iterate over all entries in the hashmap
 387 void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
 388   for (int i=0; i<_size; i++) {
 389     JvmtiTagHashmapEntry* entry = _table[i];
 390     JvmtiTagHashmapEntry* prev = NULL;
 391     while (entry != NULL) {
 392       // obtain the next entry before invoking do_entry - this is
 393       // necessary because do_entry may remove the entry from the
 394       // hashmap.
 395       JvmtiTagHashmapEntry* next = entry->next();
 396       closure->do_entry(entry);
 397       entry = next;
 398      }
 399   }
 400 }
 401 
 402 // debugging
 403 void JvmtiTagHashmap::print_memory_usage() {
 404   intptr_t p = (intptr_t)this;
 405   tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p);
 406 
 407   // table + entries in KB
 408   int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) +
 409     entry_count()*sizeof(JvmtiTagHashmapEntry))/K;
 410 
 411   int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K);
 412   tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]",
 413     entry_count(), hashmap_usage, weak_globals_usage);
 414 }
 415 
 416 // compute threshold for the next trace message
 417 void JvmtiTagHashmap::compute_next_trace_threshold() {
 418   _trace_threshold = entry_count();
 419   if (trace_threshold() < medium_trace_threshold) {
 420     _trace_threshold += small_trace_threshold;
 421   } else {
 422     if (trace_threshold() < large_trace_threshold) {
 423       _trace_threshold += medium_trace_threshold;
 424     } else {
 425       _trace_threshold += large_trace_threshold;
 426     }
 427   }
 428 }
 429 
// create a JvmtiTagMap for the given environment and register it with
// the environment; creation is serialized by JvmtiThreadState_lock
// (see tag_map_for)
JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
  _env(env),
  _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false),
  _free_entries(NULL),
  _free_entries_count(0)
{
  assert(JvmtiThreadState_lock->is_locked(), "sanity check");
  assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");

  _hashmap = new JvmtiTagHashmap();

  // finally add us to the environment
  ((JvmtiEnvBase *)env)->set_tag_map(this);
}
 445 
 446 
// destroy a JvmtiTagMap: detach it from the environment, then free every
// entry still in the hashmap, the hashmap itself, and the free list
JvmtiTagMap::~JvmtiTagMap() {

  // no lock acquired as we assume the enclosing environment is
  // also being destroyed.
  ((JvmtiEnvBase *)_env)->set_tag_map(NULL);

  // delete all entries remaining in the hashmap's bucket chains
  JvmtiTagHashmapEntry** table = _hashmap->table();
  for (int j = 0; j < _hashmap->size(); j++) {
    JvmtiTagHashmapEntry* entry = table[j];
    while (entry != NULL) {
      JvmtiTagHashmapEntry* next = entry->next();
      delete entry;
      entry = next;
    }
  }

  // finally destroy the hashmap
  delete _hashmap;
  _hashmap = NULL;

  // remove any entries on the free list
  JvmtiTagHashmapEntry* entry = _free_entries;
  while (entry != NULL) {
    JvmtiTagHashmapEntry* next = entry->next();
    delete entry;
    entry = next;
  }
  _free_entries = NULL;
}
 477 
 478 // create a hashmap entry
 479 // - if there's an entry on the (per-environment) free list then this
 480 // is returned. Otherwise an new entry is allocated.
 481 JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(oop ref, jlong tag) {
 482   assert(Thread::current()->is_VM_thread() || is_locked(), "checking");
 483   JvmtiTagHashmapEntry* entry;
 484   if (_free_entries == NULL) {
 485     entry = new JvmtiTagHashmapEntry(ref, tag);
 486   } else {
 487     assert(_free_entries_count > 0, "mismatched _free_entries_count");
 488     _free_entries_count--;
 489     entry = _free_entries;
 490     _free_entries = entry->next();
 491     entry->init(ref, tag);
 492   }
 493   return entry;
 494 }
 495 
 496 // destroy an entry by returning it to the free list
 497 void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
 498   assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
 499   // limit the size of the free list
 500   if (_free_entries_count >= max_free_entries) {
 501     delete entry;
 502   } else {
 503     entry->set_next(_free_entries);
 504     _free_entries = entry;
 505     _free_entries_count++;
 506   }
 507 }
 508 
// returns the tag map for the given environment. If the tag map
// doesn't exist then it is created.
JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
  JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map();
  if (tag_map == NULL) {
    // double-checked creation: re-read under JvmtiThreadState_lock in
    // case another thread created the tag map between the unlocked
    // check above and acquiring the lock
    MutexLocker mu(JvmtiThreadState_lock);
    tag_map = ((JvmtiEnvBase*)env)->tag_map();
    if (tag_map == NULL) {
      tag_map = new JvmtiTagMap(env);
    }
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return tag_map;
}
 524 
// iterate over all entries in the tag map, delegating to the hashmap's
// iterator (the closure may remove entries while iterating)
void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
  hashmap()->entry_iterate(closure);
}
 529 
// returns true if the hashmap is empty; must be called at a safepoint
// or with the tag map lock held
bool JvmtiTagMap::is_empty() {
  assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
  return hashmap()->entry_count() == 0;
}
 535 
 536 
 537 // Return the tag value for an object, or 0 if the object is
 538 // not tagged
 539 //
 540 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
 541   JvmtiTagHashmapEntry* entry = tag_map->hashmap()->find(o);
 542   if (entry == NULL) {
 543     return 0;
 544   } else {
 545     return entry->tag();
 546   }
 547 }
 548 
 549 
 550 // A CallbackWrapper is a support class for querying and tagging an object
 551 // around a callback to a profiler. The constructor does pre-callback
 552 // work to get the tag value, klass tag value, ... and the destructor
 553 // does the post-callback work of tagging or untagging the object.
 554 //
 555 // {
 556 //   CallbackWrapper wrapper(tag_map, o);
 557 //
 558 //   (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
 559 //
 560 // } // wrapper goes out of scope here which results in the destructor
 561 //      checking to see if the object has been tagged, untagged, or the
 562 //      tag value has changed.
 563 //
class CallbackWrapper : public StackObj {
 private:
  JvmtiTagMap* _tag_map;
  JvmtiTagHashmap* _hashmap;
  JvmtiTagHashmapEntry* _entry;        // entry for _o, or NULL if untagged
  oop _o;
  jlong _obj_size;
  jlong _obj_tag;
  jlong _klass_tag;

 protected:
  JvmtiTagMap* tag_map() const      { return _tag_map; }

  // invoked post-callback to tag, untag, or update the tag of an object
  void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
                                       JvmtiTagHashmapEntry* entry, jlong obj_tag);
 public:
  // capture the object's size, tag, and class tag ahead of the callback
  CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
    assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
           "MT unsafe or must be VM thread");

    // object to tag
    _o = o;

    // object size in bytes (size() is in words)
    _obj_size = (jlong)_o->size() * wordSize;

    // record the context
    _tag_map = tag_map;
    _hashmap = tag_map->hashmap();
    _entry = _hashmap->find(_o);

    // get object tag (0 if untagged)
    _obj_tag = (_entry == NULL) ? 0 : _entry->tag();

    // get the class and the class's tag value
    assert(SystemDictionary::Class_klass()->is_mirror_instance_klass(), "Is not?");

    _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
  }

  // apply whatever tag change the callback made through obj_tag_p()
  ~CallbackWrapper() {
    post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
  }

  inline jlong* obj_tag_p()                     { return &_obj_tag; }
  inline jlong obj_size() const                 { return _obj_size; }
  inline jlong obj_tag() const                  { return _obj_tag; }
  inline jlong klass_tag() const                { return _klass_tag; }
};
 614 
 615 
 616 
// invoked post-callback to tag, untag, or update the tag of an object:
// reconcile the tag value left by the agent callback (obj_tag) with the
// state of the hashmap entry captured before the callback ran
void inline CallbackWrapper::post_callback_tag_update(oop o,
                                                      JvmtiTagHashmap* hashmap,
                                                      JvmtiTagHashmapEntry* entry,
                                                      jlong obj_tag) {
  if (entry == NULL) {
    if (obj_tag != 0) {
      // callback has tagged the object
      assert(Thread::current()->is_VM_thread(), "must be VMThread");
      entry = tag_map()->create_entry(o, obj_tag);
      hashmap->add(o, entry);
    }
    // else: object was untagged and remains untagged - nothing to do
  } else {
    // object was previously tagged - the callback may have untagged
    // the object or changed the tag value
    if (obj_tag == 0) {
      // callback untagged the object - remove and recycle the entry
      JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
      assert(entry_removed == entry, "checking");
      tag_map()->destroy_entry(entry);

    } else {
      if (obj_tag != entry->tag()) {
         entry->set_tag(obj_tag);
      }
    }
  }
}
 645 
 646 // An extended CallbackWrapper used when reporting an object reference
 647 // to the agent.
 648 //
 649 // {
 650 //   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
 651 //
 652 //   (*callback)(wrapper.klass_tag(),
 653 //               wrapper.obj_size(),
 654 //               wrapper.obj_tag_p()
 655 //               wrapper.referrer_tag_p(), ...)
 656 //
 657 // } // wrapper goes out of scope here which results in the destructor
 658 //      checking to see if the referrer object has been tagged, untagged,
 659 //      or the tag value has changed.
 660 //
 661 class TwoOopCallbackWrapper : public CallbackWrapper {
 662  private:
 663   bool _is_reference_to_self;
 664   JvmtiTagHashmap* _referrer_hashmap;
 665   JvmtiTagHashmapEntry* _referrer_entry;
 666   oop _referrer;
 667   jlong _referrer_obj_tag;
 668   jlong _referrer_klass_tag;
 669   jlong* _referrer_tag_p;
 670 
 671   bool is_reference_to_self() const             { return _is_reference_to_self; }
 672 
 673  public:
 674   TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
 675     CallbackWrapper(tag_map, o)
 676   {
 677     // self reference needs to be handled in a special way
 678     _is_reference_to_self = (referrer == o);
 679 
 680     if (_is_reference_to_self) {
 681       _referrer_klass_tag = klass_tag();
 682       _referrer_tag_p = obj_tag_p();
 683     } else {
 684       _referrer = referrer;
 685       // record the context
 686       _referrer_hashmap = tag_map->hashmap();
 687       _referrer_entry = _referrer_hashmap->find(_referrer);
 688 
 689       // get object tag
 690       _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
 691       _referrer_tag_p = &_referrer_obj_tag;
 692 
 693       // get referrer class tag.
 694       _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
 695     }
 696   }
 697 
 698   ~TwoOopCallbackWrapper() {
 699     if (!is_reference_to_self()){
 700       post_callback_tag_update(_referrer,
 701                                _referrer_hashmap,
 702                                _referrer_entry,
 703                                _referrer_obj_tag);
 704     }
 705   }
 706 
 707   // address of referrer tag
 708   // (for a self reference this will return the same thing as obj_tag_p())
 709   inline jlong* referrer_tag_p()        { return _referrer_tag_p; }
 710 
 711   // referrer's class tag
 712   inline jlong referrer_klass_tag()     { return _referrer_klass_tag; }
 713 };
 714 
 715 // tag an object
 716 //
 717 // This function is performance critical. If many threads attempt to tag objects
 718 // around the same time then it's possible that the Mutex associated with the
 719 // tag map will be a hot lock.
 720 void JvmtiTagMap::set_tag(jobject object, jlong tag) {
 721   MutexLocker ml(lock());
 722 
 723   // resolve the object
 724   oop o = JNIHandles::resolve_non_null(object);
 725 
 726   // see if the object is already tagged
 727   JvmtiTagHashmap* hashmap = _hashmap;
 728   JvmtiTagHashmapEntry* entry = hashmap->find(o);
 729 
 730   // if the object is not already tagged then we tag it
 731   if (entry == NULL) {
 732     if (tag != 0) {
 733       entry = create_entry(o, tag);
 734       hashmap->add(o, entry);
 735     } else {
 736       // no-op
 737     }
 738   } else {
 739     // if the object is already tagged then we either update
 740     // the tag (if a new tag value has been provided)
 741     // or remove the object if the new tag value is 0.
 742     if (tag == 0) {
 743       hashmap->remove(o);
 744       destroy_entry(entry);
 745     } else {
 746       entry->set_tag(tag);
 747     }
 748   }
 749 }
 750 
 751 // get the tag for an object
 752 jlong JvmtiTagMap::get_tag(jobject object) {
 753   MutexLocker ml(lock());
 754 
 755   // resolve the object
 756   oop o = JNIHandles::resolve_non_null(object);
 757 
 758   return tag_for(this, o);
 759 }
 760 
 761 
 762 // Helper class used to describe the static or instance fields of a class.
 763 // For each field it holds the field index (as defined by the JVMTI specification),
 764 // the field type, and the offset.
 765 
 766 class ClassFieldDescriptor: public CHeapObj<mtInternal> {
 767  private:
 768   int _field_index;
 769   int _field_offset;
 770   char _field_type;
 771  public:
 772   ClassFieldDescriptor(int index, char type, int offset) :
 773     _field_index(index), _field_type(type), _field_offset(offset) {
 774   }
 775   int field_index()  const  { return _field_index; }
 776   char field_type()  const  { return _field_type; }
 777   int field_offset() const  { return _field_offset; }
 778 };
 779 
// A ClassFieldMap describes the set of static or instance fields of a
// class as a list of ClassFieldDescriptors.
class ClassFieldMap: public CHeapObj<mtInternal> {
 private:
  enum {
    initial_field_count = 5   // initial capacity of the descriptor list
  };

  // list of field descriptors
  GrowableArray<ClassFieldDescriptor*>* _fields;

  // constructor
  ClassFieldMap();

  // add a field
  void add(int index, char type, int offset);

  // returns the field count for the given class
  static int compute_field_count(InstanceKlass* ik);

 public:
  ~ClassFieldMap();

  // access
  int field_count()                     { return _fields->length(); }
  ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }

  // functions to create maps of static or instance fields
  static ClassFieldMap* create_map_of_static_fields(Klass* k);
  static ClassFieldMap* create_map_of_instance_fields(oop obj);
};
 809 
// create an empty field map; the descriptor list is C-heap allocated
// (ResourceObj::C_HEAP) so it is not tied to a resource area
ClassFieldMap::ClassFieldMap() {
  _fields = new (ResourceObj::C_HEAP, mtInternal)
    GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
}
 814 
 815 ClassFieldMap::~ClassFieldMap() {
 816   for (int i=0; i<_fields->length(); i++) {
 817     delete _fields->at(i);
 818   }
 819   delete _fields;
 820 }
 821 
 822 void ClassFieldMap::add(int index, char type, int offset) {
 823   ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
 824   _fields->append(field);
 825 }
 826 
// Returns a heap allocated ClassFieldMap to describe the static fields
// of the given class.
//
ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
  HandleMark hm;
  InstanceKlass* ik = InstanceKlass::cast(k);

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  // count the fields to compute the highest field index; the JVMTI field
  // index is then derived as max_field_index - stream position below
  FilteredFieldStream f(ik, false, false);
  int max_field_index = f.field_count()-1;

  // NOTE(review): the counting stream above uses (false, false) while the
  // iteration stream below uses (true, true) - presumably intentional so
  // the index space matches the instance-field map; confirm against
  // FilteredFieldStream's parameter semantics.
  int index = 0;
  for (FilteredFieldStream fld(ik, true, true); !fld.eos(); fld.next(), index++) {
    // ignore instance fields
    if (!fld.access_flags().is_static()) {
      continue;
    }
    // record the field index, the type (first char of the signature),
    // and the field offset
    field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
  }
  return field_map;
}
 850 
// Returns a heap allocated ClassFieldMap to describe the instance fields
// of the given class. All instance fields are included (this means public
// and private fields declared in superclasses and superinterfaces too).
//
ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
  HandleMark hm;
  InstanceKlass* ik = InstanceKlass::cast(obj->klass());

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  // count the fields to compute the highest field index; the field index
  // recorded below is derived as max_field_index - stream position
  FilteredFieldStream f(ik, false, false);

  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, false, false); !fld.eos(); fld.next(), index++) {
    // ignore static fields
    if (fld.access_flags().is_static()) {
      continue;
    }
    // record the field index, the type (first char of the signature),
    // and the field offset
    field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
  }

  return field_map;
}
 877 
// Helper class used to cache a ClassFieldMap for the instance fields of
// a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
// heap iteration and avoid creating a field map for each object in the heap
// (only need to create the map when the first instance of a class is encountered).
//
class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
 private:
   enum {
     initial_class_count = 200   // initial capacity of the class list
   };
  ClassFieldMap* _field_map;     // the cached field map (owned)

  ClassFieldMap* field_map() const          { return _field_map; }

  // construction and destruction are private - instances are managed
  // through the static functions below
  JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
  ~JvmtiCachedClassFieldMap();

  // list of InstanceKlasses that currently cache a field map
  static GrowableArray<InstanceKlass*>* _class_list;
  static void add_to_class_list(InstanceKlass* ik);

 public:
  // returns the field map for a given object (returning the map cached
  // by the InstanceKlass if possible)
  static ClassFieldMap* get_map_of_instance_fields(oop obj);

  // removes the field map from all instanceKlasses - should be
  // called before VM operation completes
  static void clear_cache();

  // returns the number of ClassFieldMap cached by instanceKlasses
  static int cached_field_map_count();
};
 910 
 911 GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
 912 
 913 JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
 914   _field_map = field_map;
 915 }
 916 
 917 JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
 918   if (_field_map != NULL) {
 919     delete _field_map;
 920   }
 921 }
 922 
// Marker class to ensure that the class file map cache is only used in a defined
// scope. The cache is enabled while a mark is on the stack and emptied by the
// mark's destructor.
class ClassFieldMapCacheMark : public StackObj {
 private:
   static bool _is_active;      // true while a mark is in scope
 public:
   ClassFieldMapCacheMark() {
     // the cache is only used by heap walks run inside a VM operation
     assert(Thread::current()->is_VM_thread(), "must be VMThread");
     assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
     assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
     _is_active = true;
   }
   ~ClassFieldMapCacheMark() {
     // release every cached field map before the VM operation completes
     JvmtiCachedClassFieldMap::clear_cache();
     _is_active = false;
   }
   static bool is_active() { return _is_active; }
};

bool ClassFieldMapCacheMark::_is_active;
 943 
 944 
// record that the given InstanceKlass is caching a field map
void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
  // the list is allocated lazily on the first cached map (C heap, since it
  // must survive across the whole heap walk)
  if (_class_list == NULL) {
    _class_list = new (ResourceObj::C_HEAP, mtInternal)
      GrowableArray<InstanceKlass*>(initial_class_count, true);
  }
  _class_list->push(ik);
}
 953 
 954 // returns the instance field map for the given object
 955 // (returns field map cached by the InstanceKlass if possible)
 956 ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
 957   assert(Thread::current()->is_VM_thread(), "must be VMThread");
 958   assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");
 959 
 960   Klass* k = obj->klass();
 961   InstanceKlass* ik = InstanceKlass::cast(k);
 962 
 963   // return cached map if possible
 964   JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
 965   if (cached_map != NULL) {
 966     assert(cached_map->field_map() != NULL, "missing field list");
 967     return cached_map->field_map();
 968   } else {
 969     ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
 970     cached_map = new JvmtiCachedClassFieldMap(field_map);
 971     ik->set_jvmti_cached_class_field_map(cached_map);
 972     add_to_class_list(ik);
 973     return field_map;
 974   }
 975 }
 976 
 977 // remove the fields maps cached from all instanceKlasses
 978 void JvmtiCachedClassFieldMap::clear_cache() {
 979   assert(Thread::current()->is_VM_thread(), "must be VMThread");
 980   if (_class_list != NULL) {
 981     for (int i = 0; i < _class_list->length(); i++) {
 982       InstanceKlass* ik = _class_list->at(i);
 983       JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
 984       assert(cached_map != NULL, "should not be NULL");
 985       ik->set_jvmti_cached_class_field_map(NULL);
 986       delete cached_map;  // deletes the encapsulated field map
 987     }
 988     delete _class_list;
 989     _class_list = NULL;
 990   }
 991 }
 992 
 993 // returns the number of ClassFieldMap cached by instanceKlasses
 994 int JvmtiCachedClassFieldMap::cached_field_map_count() {
 995   return (_class_list == NULL) ? 0 : _class_list->length();
 996 }
 997 
 998 // helper function to indicate if an object is filtered by its tag or class tag
 999 static inline bool is_filtered_by_heap_filter(jlong obj_tag,
1000                                               jlong klass_tag,
1001                                               int heap_filter) {
1002   // apply the heap filter
1003   if (obj_tag != 0) {
1004     // filter out tagged objects
1005     if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
1006   } else {
1007     // filter out untagged objects
1008     if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
1009   }
1010   if (klass_tag != 0) {
1011     // filter out objects with tagged classes
1012     if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
1013   } else {
1014     // filter out objects with untagged classes.
1015     if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
1016   }
1017   return false;
1018 }
1019 
1020 // helper function to indicate if an object is filtered by a klass filter
1021 static inline bool is_filtered_by_klass_filter(oop obj, Klass* klass_filter) {
1022   if (klass_filter != NULL) {
1023     if (obj->klass() != klass_filter) {
1024       return true;
1025     }
1026   }
1027   return false;
1028 }
1029 
// helper function to tell if a field is a primitive field or not,
// based on the first character of its JVM type signature
static inline bool is_primitive_field_type(char type) {
  switch (type) {
    case 'L':   // object reference
    case '[':   // array reference
      return false;
    default:
      return true;
  }
}
1034 
// helper function to copy the value from location addr to jvalue.
// value_type selects which union member of the jvalue is written; addr must
// point to a correctly aligned value of that primitive type.
static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
  switch (value_type) {
    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
    case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
    default: ShouldNotReachHere();
  }
}
1049 
// helper function to invoke string primitive value callback with the
// UTF-16 contents of a java.lang.String
// returns visit control flags
static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
                                         CallbackWrapper* wrapper,
                                         oop str,
                                         void* user_data)
{
  assert(str->klass() == SystemDictionary::String_klass(), "not a string");

  typeArrayOop s_value = java_lang_String::value(str);

  // JDK-6584008: the value field may be null if a String instance is
  // partially constructed.
  if (s_value == NULL) {
    return 0;
  }
  // get the string value and length
  // (string value may be offset from the base)
  int s_len = java_lang_String::length(str);
  bool is_latin1 = java_lang_String::is_latin1(str);
  jchar* value;
  if (s_len > 0) {
    if (!is_latin1) {
      // already UTF-16: point directly into the value array
      value = s_value->char_at_addr(0);
    } else {
      // Inflate latin1 encoded string to UTF16 in a temporary C-heap
      // buffer (freed after the callback returns)
      jchar* buf = NEW_C_HEAP_ARRAY(jchar, s_len, mtInternal);
      for (int i = 0; i < s_len; i++) {
        buf[i] = ((jchar) s_value->byte_at(i)) & 0xff;
      }
      value = &buf[0];
    }
  } else {
    // Don't use char_at_addr(0) if length is 0
    value = (jchar*) s_value->base(T_CHAR);
  }

  // invoke the callback
  jint res = (*cb)(wrapper->klass_tag(),
                   wrapper->obj_size(),
                   wrapper->obj_tag_p(),
                   value,
                   (jint)s_len,
                   user_data);

  // free the inflation buffer (only allocated in the latin1, non-empty case)
  if (is_latin1 && s_len > 0) {
    FREE_C_HEAP_ARRAY(jchar, value);
  }
  return res;
}
1100 
// helper function to invoke array primitive value callback with the
// elements of a primitive array
// returns visit control flags
static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
                                                  CallbackWrapper* wrapper,
                                                  oop obj,
                                                  void* user_data)
{
  assert(obj->is_typeArray(), "not a primitive array");

  // get base address of first element
  typeArrayOop array = typeArrayOop(obj);
  BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
  void* elements = array->base(type);

  // jvmtiPrimitiveType is defined so this mapping is always correct
  jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);

  return (*cb)(wrapper->klass_tag(),
               wrapper->obj_size(),
               wrapper->obj_tag_p(),
               (jint)array->length(),
               elem_type,
               elements,
               user_data);
}
1126 
// helper function to invoke the primitive field callback for all static fields
// of a given class
static jint invoke_primitive_field_callback_for_static_fields
  (CallbackWrapper* wrapper,
   oop obj,
   jvmtiPrimitiveFieldCallback cb,
   void* user_data)
{
  // for static fields only the index will be set
  // (function-local static; heap walks run single-threaded in the VM thread,
  // so reuse across calls is safe here)
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
  if (java_lang_Class::is_primitive(obj)) {
    return 0;
  }
  Klass* klass = java_lang_Class::as_Klass(obj);

  // ignore classes for object and type arrays
  if (!klass->is_instance_klass()) {
    return 0;
  }

  // ignore classes which aren't linked yet
  InstanceKlass* ik = InstanceKlass::cast(klass);
  if (!ik->is_linked()) {
    return 0;
  }

  // get the field map (freshly allocated; deleted before every return below)
  ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);

  // invoke the callback for each static primitive field
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value (static fields live in the java mirror)
    int offset = field->field_offset();
    address addr = (address)klass->java_mirror() + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      delete field_map;
      return res;
    }
  }

  delete field_map;
  return 0;
}
1196 
// helper function to invoke the primitive field callback for all instance fields
// of a given object
static jint invoke_primitive_field_callback_for_instance_fields(
  CallbackWrapper* wrapper,
  oop obj,
  jvmtiPrimitiveFieldCallback cb,
  void* user_data)
{
  // for instance fields only the index will be set
  // (function-local static; heap walks run single-threaded in the VM thread)
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  // get the map of the instance fields (cached per class during the walk,
  // so it must NOT be deleted here - clear_cache() owns the cleanup)
  ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);

  // invoke the callback for each instance primitive field
  for (int i=0; i<fields->field_count(); i++) {
    ClassFieldDescriptor* field = fields->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value
    int offset = field->field_offset();
    address addr = (address)obj + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      return res;
    }
  }
  return 0;
}
1246 
1247 
// VM operation to iterate over all objects in the heap (both reachable
// and unreachable). Runs at a safepoint; the supplied closure is applied
// to every object.
class VM_HeapIterateOperation: public VM_Operation {
 private:
  ObjectClosure* _blk;   // closure invoked for each heap object (not owned)
 public:
  VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }

  VMOp_Type type() const { return VMOp_HeapIterateOperation; }
  void doit() {
    // allows class files maps to be cached during iteration
    ClassFieldMapCacheMark cm;

    // make sure that heap is parsable (fills TLABs with filler objects)
    Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

    // Verify heap before iteration - if the heap gets corrupted then
    // JVMTI's IterateOverHeap will crash.
    if (VerifyBeforeIteration) {
      Universe::verify();
    }

    // do the iteration
    // If this operation encounters a bad object when using CMS,
    // consider using safe_object_iterate() which avoids perm gen
    // objects that may contain bad references.
    Universe::heap()->object_iterate(_blk);
  }

};
1278 
1279 
// An ObjectClosure used to support the deprecated IterateOverHeap and
// IterateOverInstancesOfClass functions
class IterateOverHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;                        // tag map of the requesting environment
  Klass* _klass;                                // optional exact/instanceof filter (may be NULL)
  jvmtiHeapObjectFilter _object_filter;         // tagged/untagged filter
  jvmtiHeapObjectCallback _heap_object_callback;// agent callback
  const void* _user_data;                       // opaque data passed through to the callback

  // accessors
  JvmtiTagMap* tag_map() const                    { return _tag_map; }
  jvmtiHeapObjectFilter object_filter() const     { return _object_filter; }
  jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
  Klass* klass() const                            { return _klass; }
  const void* user_data() const                   { return _user_data; }

  // indicates if iteration has been aborted (set when the callback
  // returns JVMTI_ITERATION_ABORT)
  bool _iteration_aborted;
  bool is_iteration_aborted() const               { return _iteration_aborted; }
  void set_iteration_aborted(bool aborted)        { _iteration_aborted = aborted; }

 public:
  IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
                               Klass* klass,
                               jvmtiHeapObjectFilter object_filter,
                               jvmtiHeapObjectCallback heap_object_callback,
                               const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _object_filter(object_filter),
    _heap_object_callback(heap_object_callback),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  void do_object(oop o);
};
1319 
// invoked for each object in the heap
void IterateOverHeapObjectClosure::do_object(oop o) {
  // check if iteration has been halted
  if (is_iteration_aborted()) return;

  // ignore any objects that aren't visible to profiler
  if (!ServiceUtil::visible_oop(o)) return;

  // instanceof check when filtering by klass
  if (klass() != NULL && !o->is_a(klass())) {
    return;
  }
  // prepare for the callback
  CallbackWrapper wrapper(tag_map(), o);

  // if the object is tagged and we're only interested in untagged objects
  // then don't invoke the callback. Similarly, if the object is untagged
  // and we're only interested in tagged objects we skip the callback.
  if (wrapper.obj_tag() != 0) {
    if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
  } else {
    if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
  }

  // invoke the agent's callback; on abort the remaining objects are still
  // visited by the closure but skipped immediately at the top of do_object
  jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
                                                       wrapper.obj_size(),
                                                       wrapper.obj_tag_p(),
                                                       (void*)user_data());
  if (control == JVMTI_ITERATION_ABORT) {
    set_iteration_aborted(true);
  }
}
1353 
// An ObjectClosure used to support the IterateThroughHeap function
class IterateThroughHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;                  // tag map of the requesting environment
  Klass* _klass;                          // optional exact klass filter (may be NULL)
  int _heap_filter;                       // JVMTI_HEAP_FILTER_* bits
  const jvmtiHeapCallbacks* _callbacks;   // agent-supplied callback table
  const void* _user_data;                 // opaque data passed through to the callbacks

  // accessor functions
  JvmtiTagMap* tag_map() const                     { return _tag_map; }
  int heap_filter() const                          { return _heap_filter; }
  const jvmtiHeapCallbacks* callbacks() const      { return _callbacks; }
  Klass* klass() const                             { return _klass; }
  const void* user_data() const                    { return _user_data; }

  // indicates if the iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const                { return _iteration_aborted; }

  // used to check the visit control flags. If the abort flag is set
  // then we set the iteration aborted flag so that the iteration completes
  // without processing any further objects
  bool check_flags_for_abort(jint flags) {
    bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
    if (is_abort) {
      _iteration_aborted = true;
    }
    return is_abort;
  }

 public:
  IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
                                  Klass* klass,
                                  int heap_filter,
                                  const jvmtiHeapCallbacks* heap_callbacks,
                                  const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _heap_filter(heap_filter),
    _callbacks(heap_callbacks),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  void do_object(oop o);
};
1402 
1403 // invoked for each object in the heap
1404 void IterateThroughHeapObjectClosure::do_object(oop obj) {
1405   // check if iteration has been halted
1406   if (is_iteration_aborted()) return;
1407 
1408   // ignore any objects that aren't visible to profiler
1409   if (!ServiceUtil::visible_oop(obj)) return;
1410 
1411   // apply class filter
1412   if (is_filtered_by_klass_filter(obj, klass())) return;
1413 
1414   // prepare for callback
1415   CallbackWrapper wrapper(tag_map(), obj);
1416 
1417   // check if filtered by the heap filter
1418   if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
1419     return;
1420   }
1421 
1422   // for arrays we need the length, otherwise -1
1423   bool is_array = obj->is_array();
1424   int len = is_array ? arrayOop(obj)->length() : -1;
1425 
1426   // invoke the object callback (if callback is provided)
1427   if (callbacks()->heap_iteration_callback != NULL) {
1428     jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
1429     jint res = (*cb)(wrapper.klass_tag(),
1430                      wrapper.obj_size(),
1431                      wrapper.obj_tag_p(),
1432                      (jint)len,
1433                      (void*)user_data());
1434     if (check_flags_for_abort(res)) return;
1435   }
1436 
1437   // for objects and classes we report primitive fields if callback provided
1438   if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
1439     jint res;
1440     jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
1441     if (obj->klass() == SystemDictionary::Class_klass()) {
1442       res = invoke_primitive_field_callback_for_static_fields(&wrapper,
1443                                                                     obj,
1444                                                                     cb,
1445                                                                     (void*)user_data());
1446     } else {
1447       res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
1448                                                                       obj,
1449                                                                       cb,
1450                                                                       (void*)user_data());
1451     }
1452     if (check_flags_for_abort(res)) return;
1453   }
1454 
1455   // string callback
1456   if (!is_array &&
1457       callbacks()->string_primitive_value_callback != NULL &&
1458       obj->klass() == SystemDictionary::String_klass()) {
1459     jint res = invoke_string_value_callback(
1460                 callbacks()->string_primitive_value_callback,
1461                 &wrapper,
1462                 obj,
1463                 (void*)user_data() );
1464     if (check_flags_for_abort(res)) return;
1465   }
1466 
1467   // array callback
1468   if (is_array &&
1469       callbacks()->array_primitive_value_callback != NULL &&
1470       obj->is_typeArray()) {
1471     jint res = invoke_array_primitive_value_callback(
1472                callbacks()->array_primitive_value_callback,
1473                &wrapper,
1474                obj,
1475                (void*)user_data() );
1476     if (check_flags_for_abort(res)) return;
1477   }
1478 };
1479 
1480 
// Deprecated function to iterate over all objects in the heap
// (implements the JVMTI IterateOverHeap/IterateOverInstancesOfClass API).
// Holds the Heap_lock for the duration of the VM operation.
void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
                                    Klass* klass,
                                    jvmtiHeapObjectCallback heap_object_callback,
                                    const void* user_data)
{
  MutexLocker ml(Heap_lock);
  IterateOverHeapObjectClosure blk(this,
                                   klass,
                                   object_filter,
                                   heap_object_callback,
                                   user_data);
  VM_HeapIterateOperation op(&blk);
  VMThread::execute(&op);
}
1496 
1497 
1498 // Iterates over all objects in the heap
1499 void JvmtiTagMap::iterate_through_heap(jint heap_filter,
1500                                        Klass* klass,
1501                                        const jvmtiHeapCallbacks* callbacks,
1502                                        const void* user_data)
1503 {
1504   MutexLocker ml(Heap_lock);
1505   IterateThroughHeapObjectClosure blk(this,
1506                                       klass,
1507                                       heap_filter,
1508                                       callbacks,
1509                                       user_data);
1510   VM_HeapIterateOperation op(&blk);
1511   VMThread::execute(&op);
1512 }
1513 
1514 // support class for get_objects_with_tags
1515 
1516 class TagObjectCollector : public JvmtiTagHashmapEntryClosure {
1517  private:
1518   JvmtiEnv* _env;
1519   jlong* _tags;
1520   jint _tag_count;
1521 
1522   GrowableArray<jobject>* _object_results;  // collected objects (JNI weak refs)
1523   GrowableArray<uint64_t>* _tag_results;    // collected tags
1524 
1525  public:
1526   TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) {
1527     _env = env;
1528     _tags = (jlong*)tags;
1529     _tag_count = tag_count;
1530     _object_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<jobject>(1,true);
1531     _tag_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<uint64_t>(1,true);
1532   }
1533 
1534   ~TagObjectCollector() {
1535     delete _object_results;
1536     delete _tag_results;
1537   }
1538 
1539   // for each tagged object check if the tag value matches
1540   // - if it matches then we create a JNI local reference to the object
1541   // and record the reference and tag value.
1542   //
1543   void do_entry(JvmtiTagHashmapEntry* entry) {
1544     for (int i=0; i<_tag_count; i++) {
1545       if (_tags[i] == entry->tag()) {
1546         // The reference in this tag map could be the only (implicitly weak)
1547         // reference to that object. If we hand it out, we need to keep it live wrt
1548         // SATB marking similar to other j.l.ref.Reference referents. This is
1549         // achieved by using a phantom load in the object() accessor.
1550         oop o = entry->object();
1551         assert(o != NULL && Universe::heap()->is_in_reserved(o), "sanity check");
1552         jobject ref = JNIHandles::make_local(JavaThread::current(), o);
1553         _object_results->append(ref);
1554         _tag_results->append((uint64_t)entry->tag());
1555       }
1556     }
1557   }
1558 
1559   // return the results from the collection
1560   //
1561   jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1562     jvmtiError error;
1563     int count = _object_results->length();
1564     assert(count >= 0, "sanity check");
1565 
1566     // if object_result_ptr is not NULL then allocate the result and copy
1567     // in the object references.
1568     if (object_result_ptr != NULL) {
1569       error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
1570       if (error != JVMTI_ERROR_NONE) {
1571         return error;
1572       }
1573       for (int i=0; i<count; i++) {
1574         (*object_result_ptr)[i] = _object_results->at(i);
1575       }
1576     }
1577 
1578     // if tag_result_ptr is not NULL then allocate the result and copy
1579     // in the tag values.
1580     if (tag_result_ptr != NULL) {
1581       error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
1582       if (error != JVMTI_ERROR_NONE) {
1583         if (object_result_ptr != NULL) {
1584           _env->Deallocate((unsigned char*)object_result_ptr);
1585         }
1586         return error;
1587       }
1588       for (int i=0; i<count; i++) {
1589         (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
1590       }
1591     }
1592 
1593     *count_ptr = count;
1594     return JVMTI_ERROR_NONE;
1595   }
1596 };
1597 
// return the list of objects with the specified tags
// (implements the JVMTI GetObjectsWithTags API); output arrays are
// allocated with JvmtiEnv::Allocate and owned by the caller
jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
  jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {

  TagObjectCollector collector(env(), tags, count);
  {
    // iterate over all tagged objects while holding the tag map lock
    MutexLocker ml(lock());
    entry_iterate(&collector);
  }
  return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
}
1610 
1611 
1612 // ObjectMarker is used to support the marking objects when walking the
1613 // heap.
1614 //
1615 // This implementation uses the existing mark bits in an object for
1616 // marking. Objects that are marked must later have their headers restored.
1617 // As most objects are unlocked and don't have their identity hash computed
1618 // we don't have to save their headers. Instead we save the headers that
1619 // are "interesting". Later when the headers are restored this implementation
1620 // restores all headers to their initial value and then restores the few
1621 // objects that had interesting headers.
1622 //
1623 // Future work: This implementation currently uses growable arrays to save
1624 // the oop and header of interesting objects. As an optimization we could
1625 // use the same technique as the GC and make use of the unused area
1626 // between top() and end().
1627 //
1628 
1629 // An ObjectClosure used to restore the mark bits of an object
1630 class RestoreMarksClosure : public ObjectClosure {
1631  public:
1632   void do_object(oop o) {
1633     if (o != NULL) {
1634       markOop mark = o->mark();
1635       if (mark->is_marked()) {
1636         o->init_mark();
1637       }
1638     }
1639   }
1640 };
1641 
// ObjectMarker provides the mark and visited functions.
// Marking reuses the object header mark bits; headers that must be
// preserved (see mark()) are saved on the stacks below and restored
// by done().
class ObjectMarker : AllStatic {
 private:
  // saved headers (parallel arrays: _saved_oop_stack[i] had header
  // _saved_mark_stack[i] before marking)
  static GrowableArray<oop>* _saved_oop_stack;
  static GrowableArray<markOop>* _saved_mark_stack;
  static bool _needs_reset;                  // do we need to reset mark bits?

 public:
  static void init();                       // initialize
  static void done();                       // clean-up

  static inline void mark(oop o);           // mark an object
  static inline bool visited(oop o);        // check if object has been visited

  static inline bool needs_reset()            { return _needs_reset; }
  static inline void set_needs_reset(bool v)  { _needs_reset = v; }
};

// statics: stacks are created by init() and freed by done()
GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
bool ObjectMarker::_needs_reset = true;  // need to reset mark bits by default
1664 
// initialize ObjectMarker - prepares for object marking
void ObjectMarker::init() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");

  // prepare heap for iteration (must happen before any object is visited)
  Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

  // create stacks for interesting headers
  _saved_mark_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<markOop>(4000, true);
  _saved_oop_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(4000, true);

  // biased headers would be clobbered by marking, so preserve them first
  if (UseBiasedLocking) {
    BiasedLocking::preserve_marks();
  }
}
1680 
// Object marking is done so restore object headers
void ObjectMarker::done() {
  // iterate over all objects and restore the mark bits to
  // their initial value
  RestoreMarksClosure blk;
  if (needs_reset()) {
    Universe::heap()->object_iterate(&blk);
  } else {
    // We don't need to reset mark bits on this call, but reset the
    // flag to the default for the next call.
    set_needs_reset(true);
  }

  // now restore the interesting headers
  // (must happen after the blanket reset above, which would otherwise
  // clobber the restored values)
  for (int i = 0; i < _saved_oop_stack->length(); i++) {
    oop o = _saved_oop_stack->at(i);
    markOop mark = _saved_mark_stack->at(i);
    o->set_mark(mark);
  }

  if (UseBiasedLocking) {
    BiasedLocking::restore_marks();
  }

  // free the stacks
  delete _saved_oop_stack;
  delete _saved_mark_stack;
}
1709 
// mark an object
// Overwrites the object's header with the prototype header with the mark
// bit set. If the original header carries information that cannot be
// reconstructed (hash code, lock state, ...) it is saved first so that
// done() can restore it.
inline void ObjectMarker::mark(oop o) {
  assert(Universe::heap()->is_in(o), "sanity check");
  assert(!o->mark()->is_marked(), "should only mark an object once");

  // object's mark word
  markOop mark = o->mark();

  if (mark->must_be_preserved(o)) {
    // push onto the parallel save stacks; restored in done()
    _saved_mark_stack->push(mark);
    _saved_oop_stack->push(o);
  }

  // mark the object
  o->set_mark(markOopDesc::prototype()->set_marked());
}
1726 
1727 // return true if object is marked
1728 inline bool ObjectMarker::visited(oop o) {
1729   return o->mark()->is_marked();
1730 }
1731 
// Stack allocated class to help ensure that ObjectMarker is used
// correctly. Constructor initializes ObjectMarker, destructor calls
// ObjectMarker's done() function to restore object headers.
// (RAII: headers are restored even on early returns from the heap walk.)
class ObjectMarkerController : public StackObj {
 public:
  ObjectMarkerController() {
    ObjectMarker::init();
  }
  ~ObjectMarkerController() {
    ObjectMarker::done();
  }
};
1744 
1745 
1746 // helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
1747 // (not performance critical as only used for roots)
1748 static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
1749   switch (kind) {
1750     case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
1751     case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
1752     case JVMTI_HEAP_REFERENCE_MONITOR:      return JVMTI_HEAP_ROOT_MONITOR;
1753     case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
1754     case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
1755     case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
1756     case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
1757     default: ShouldNotReachHere();          return JVMTI_HEAP_ROOT_OTHER;
1758   }
1759 }
1760 
1761 // Base class for all heap walk contexts. The base class maintains a flag
1762 // to indicate if the context is valid or not.
1763 class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
1764  private:
1765   bool _valid;
1766  public:
1767   HeapWalkContext(bool valid)                   { _valid = valid; }
1768   void invalidate()                             { _valid = false; }
1769   bool is_valid() const                         { return _valid; }
1770 };
1771 
1772 // A basic heap walk context for the deprecated heap walking functions.
1773 // The context for a basic heap walk are the callbacks and fields used by
1774 // the referrer caching scheme.
1775 class BasicHeapWalkContext: public HeapWalkContext {
1776  private:
1777   jvmtiHeapRootCallback _heap_root_callback;
1778   jvmtiStackReferenceCallback _stack_ref_callback;
1779   jvmtiObjectReferenceCallback _object_ref_callback;
1780 
1781   // used for caching
1782   oop _last_referrer;
1783   jlong _last_referrer_tag;
1784 
1785  public:
1786   BasicHeapWalkContext() : HeapWalkContext(false) { }
1787 
1788   BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
1789                        jvmtiStackReferenceCallback stack_ref_callback,
1790                        jvmtiObjectReferenceCallback object_ref_callback) :
1791     HeapWalkContext(true),
1792     _heap_root_callback(heap_root_callback),
1793     _stack_ref_callback(stack_ref_callback),
1794     _object_ref_callback(object_ref_callback),
1795     _last_referrer(NULL),
1796     _last_referrer_tag(0) {
1797   }
1798 
1799   // accessors
1800   jvmtiHeapRootCallback heap_root_callback() const         { return _heap_root_callback; }
1801   jvmtiStackReferenceCallback stack_ref_callback() const   { return _stack_ref_callback; }
1802   jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback;  }
1803 
1804   oop last_referrer() const               { return _last_referrer; }
1805   void set_last_referrer(oop referrer)    { _last_referrer = referrer; }
1806   jlong last_referrer_tag() const         { return _last_referrer_tag; }
1807   void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
1808 };
1809 
1810 // The advanced heap walk context for the FollowReferences functions.
1811 // The context is the callbacks, and the fields used for filtering.
1812 class AdvancedHeapWalkContext: public HeapWalkContext {
1813  private:
1814   jint _heap_filter;
1815   Klass* _klass_filter;
1816   const jvmtiHeapCallbacks* _heap_callbacks;
1817 
1818  public:
1819   AdvancedHeapWalkContext() : HeapWalkContext(false) { }
1820 
1821   AdvancedHeapWalkContext(jint heap_filter,
1822                            Klass* klass_filter,
1823                            const jvmtiHeapCallbacks* heap_callbacks) :
1824     HeapWalkContext(true),
1825     _heap_filter(heap_filter),
1826     _klass_filter(klass_filter),
1827     _heap_callbacks(heap_callbacks) {
1828   }
1829 
1830   // accessors
1831   jint heap_filter() const         { return _heap_filter; }
1832   Klass* klass_filter() const      { return _klass_filter; }
1833 
1834   const jvmtiHeapReferenceCallback heap_reference_callback() const {
1835     return _heap_callbacks->heap_reference_callback;
1836   };
1837   const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
1838     return _heap_callbacks->primitive_field_callback;
1839   }
1840   const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
1841     return _heap_callbacks->array_primitive_value_callback;
1842   }
1843   const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
1844     return _heap_callbacks->string_primitive_value_callback;
1845   }
1846 };
1847 
// The CallbackInvoker is a class with static functions that the heap walk can call
// into to invoke callbacks. It works in one of two modes. The "basic" mode is
// used for the deprecated IterateOverReachableObjects functions. The "advanced"
// mode is for the newer FollowReferences function which supports a lot of
// additional callbacks.
class CallbackInvoker : AllStatic {
 private:
  // heap walk styles
  enum { basic, advanced };
  static int _heap_walk_type;   // set by initialize_for_{basic,advanced}_heap_walk
  static bool is_basic_heap_walk()           { return _heap_walk_type == basic; }
  static bool is_advanced_heap_walk()        { return _heap_walk_type == advanced; }

  // context for basic style heap walk (asserted valid before use)
  static BasicHeapWalkContext _basic_context;
  static BasicHeapWalkContext* basic_context() {
    assert(_basic_context.is_valid(), "invalid");
    return &_basic_context;
  }

  // context for advanced style heap walk (asserted valid before use)
  static AdvancedHeapWalkContext _advanced_context;
  static AdvancedHeapWalkContext* advanced_context() {
    assert(_advanced_context.is_valid(), "invalid");
    return &_advanced_context;
  }

  // context needed for all heap walks
  static JvmtiTagMap* _tag_map;                // tag map of the requesting env
  static const void* _user_data;               // opaque data passed back to callbacks
  static GrowableArray<oop>* _visit_stack;     // objects still to be visited

  // accessors
  static JvmtiTagMap* tag_map()                        { return _tag_map; }
  static const void* user_data()                       { return _user_data; }
  static GrowableArray<oop>* visit_stack()             { return _visit_stack; }

  // if the object hasn't been visited then push it onto the visit stack
  // so that it will be visited later
  // (always returns true so callers can use it as a "continue walk" result)
  static inline bool check_for_visit(oop obj) {
    if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
    return true;
  }

  // invoke basic style callbacks
  static inline bool invoke_basic_heap_root_callback
    (jvmtiHeapRootKind root_kind, oop obj);
  static inline bool invoke_basic_stack_ref_callback
    (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
     int slot, oop obj);
  static inline bool invoke_basic_object_reference_callback
    (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // invoke advanced style callbacks
  static inline bool invoke_advanced_heap_root_callback
    (jvmtiHeapReferenceKind ref_kind, oop obj);
  static inline bool invoke_advanced_stack_ref_callback
    (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
     jmethodID method, jlocation bci, jint slot, oop obj);
  static inline bool invoke_advanced_object_reference_callback
    (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // used to report the value of primitive fields
  static inline bool report_primitive_field
    (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);

 public:
  // initialize for basic mode
  static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                             GrowableArray<oop>* visit_stack,
                                             const void* user_data,
                                             BasicHeapWalkContext context);

  // initialize for advanced mode
  static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                GrowableArray<oop>* visit_stack,
                                                const void* user_data,
                                                AdvancedHeapWalkContext context);

   // functions to report roots
   // (all return false when the heap walk should be aborted)
  static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
  static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID m, oop o);
  static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID method, jlocation bci, jint slot, oop o);

  // functions to report references
  static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
  static inline bool report_class_reference(oop referrer, oop referree);
  static inline bool report_class_loader_reference(oop referrer, oop referree);
  static inline bool report_signers_reference(oop referrer, oop referree);
  static inline bool report_protection_domain_reference(oop referrer, oop referree);
  static inline bool report_superclass_reference(oop referrer, oop referree);
  static inline bool report_interface_reference(oop referrer, oop referree);
  static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
  static inline bool report_primitive_array_values(oop array);
  static inline bool report_string_value(oop str);
  static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
  static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
};
1950 
// statics (re-initialized on each heap walk by the initialize_for_* functions)
int CallbackInvoker::_heap_walk_type;
BasicHeapWalkContext CallbackInvoker::_basic_context;
AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
JvmtiTagMap* CallbackInvoker::_tag_map;
const void* CallbackInvoker::_user_data;
GrowableArray<oop>* CallbackInvoker::_visit_stack;
1958 
1959 // initialize for basic heap walk (IterateOverReachableObjects et al)
1960 void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1961                                                      GrowableArray<oop>* visit_stack,
1962                                                      const void* user_data,
1963                                                      BasicHeapWalkContext context) {
1964   _tag_map = tag_map;
1965   _visit_stack = visit_stack;
1966   _user_data = user_data;
1967   _basic_context = context;
1968   _advanced_context.invalidate();       // will trigger assertion if used
1969   _heap_walk_type = basic;
1970 }
1971 
1972 // initialize for advanced heap walk (FollowReferences)
1973 void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1974                                                         GrowableArray<oop>* visit_stack,
1975                                                         const void* user_data,
1976                                                         AdvancedHeapWalkContext context) {
1977   _tag_map = tag_map;
1978   _visit_stack = visit_stack;
1979   _user_data = user_data;
1980   _advanced_context = context;
1981   _basic_context.invalidate();      // will trigger assertion if used
1982   _heap_walk_type = advanced;
1983 }
1984 
1985 
// invoke basic style heap root callback
// Returns false only if the callback requested JVMTI_ITERATION_ABORT.
inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  // if heap roots are not being reported just push the object for later visiting
  jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        (void*)user_data());
  // push root to visit stack when following references
  // (JVMTI_ITERATION_IGNORE means don't follow this root's references)
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}
2009 
// invoke basic style stack ref callback
// Reports an object referenced from a thread stack (local or JNI local).
// Returns false only if the callback requested JVMTI_ITERATION_ABORT.
inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
                                                             jlong thread_tag,
                                                             jint depth,
                                                             jmethodID method,
                                                             int slot,
                                                             oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  // if stack refs are not being reported just push the object for later visiting
  jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        thread_tag,
                                        depth,
                                        method,
                                        slot,
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}
2042 
// invoke basic style object reference callback
// Reports a reference from referrer to referree; index is the field/array
// index for kinds that have one. Returns false only on JVMTI_ITERATION_ABORT.
inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
                                                                    oop referrer,
                                                                    oop referree,
                                                                    jint index) {

  assert(ServiceUtil::visible_oop(referrer), "checking");
  assert(ServiceUtil::visible_oop(referree), "checking");

  BasicHeapWalkContext* context = basic_context();

  // callback requires the referrer's tag. If it's the same referrer
  // as the last call then we use the cached value.
  jlong referrer_tag;
  if (referrer == context->last_referrer()) {
    referrer_tag = context->last_referrer_tag();
  } else {
    referrer_tag = tag_for(tag_map(), referrer);
  }

  // do the callback
  CallbackWrapper wrapper(tag_map(), referree);
  jvmtiObjectReferenceCallback cb = context->object_ref_callback();
  jvmtiIterationControl control = (*cb)(ref_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        referrer_tag,
                                        index,
                                        (void*)user_data());

  // record referrer and referrer tag. For self-references record the
  // tag value from the callback as this might differ from referrer_tag.
  context->set_last_referrer(referrer);
  if (referrer == referree) {
    context->set_last_referrer_tag(*wrapper.obj_tag_p());
  } else {
    context->set_last_referrer_tag(referrer_tag);
  }

  // continue the walk into referree unless the callback said otherwise
  if (control == JVMTI_ITERATION_CONTINUE) {
    return check_for_visit(referree);
  } else {
    return control != JVMTI_ITERATION_ABORT;
  }
}
2089 
// invoke advanced style heap root callback
// Applies the klass and heap (tag) filters before calling out to the agent.
// Returns false only if the callback returned JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
                                                                oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback
  jint res  = (*cb)(ref_kind,
                    NULL, // referrer info
                    wrapper.klass_tag(),
                    0,    // referrer_class_tag is 0 for heap root
                    wrapper.obj_size(),
                    wrapper.obj_tag_p(),
                    NULL, // referrer_tag_p
                    len,
                    (void*)user_data());
  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}
2139 
// report a reference from a thread stack to an object
// Fills in the stack_local portion of jvmtiHeapReferenceInfo and invokes
// the heap reference callback. Returns false only on JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
                                                                jlong thread_tag,
                                                                jlong tid,
                                                                int depth,
                                                                jmethodID method,
                                                                jlocation bci,
                                                                jint slot,
                                                                oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // setup the referrer info
  jvmtiHeapReferenceInfo reference_info;
  reference_info.stack_local.thread_tag = thread_tag;
  reference_info.stack_local.thread_id = tid;
  reference_info.stack_local.depth = depth;
  reference_info.stack_local.method = method;
  reference_info.stack_local.location = bci;
  reference_info.stack_local.slot = slot;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // call into the agent
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  0,    // referrer_class_tag is 0 for heap root (stack)
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  NULL, // referrer_tag is 0 for root
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}
2205 
// This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
// only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
// (Each bit position corresponds to a jvmtiHeapReferenceKind value.)
#define REF_INFO_MASK  ((1 << JVMTI_HEAP_REFERENCE_FIELD)         \
                      | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)  \
                      | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
                      | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
                      | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)   \
                      | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
2214 
// invoke the object reference callback to report a reference
// (the 'obj' parameter is the referree in the class declaration).
// Returns false only if the callback returned JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
                                                                       oop referrer,
                                                                       oop obj,
                                                                       jint index)
{
  // field index is only valid field in reference_info
  // NOTE(review): function-local static mutable state - presumably safe
  // because heap walks execute serially in the VM thread; confirm before
  // reusing this path concurrently.
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  assert(ServiceUtil::visible_oop(referrer), "checking");
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // field index is only valid field in reference_info
  reference_info.field.index = index;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback
  // (reference_info is only passed for kinds where the spec defines it)
  int res = (*cb)(ref_kind,
                  (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
                  wrapper.klass_tag(),
                  wrapper.referrer_klass_tag(),
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  wrapper.referrer_tag_p(),
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}
2275 
2276 // report a "simple root"
2277 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
2278   assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
2279          kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
2280   assert(ServiceUtil::visible_oop(obj), "checking");
2281 
2282   if (is_basic_heap_walk()) {
2283     // map to old style root kind
2284     jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
2285     return invoke_basic_heap_root_callback(root_kind, obj);
2286   } else {
2287     assert(is_advanced_heap_walk(), "wrong heap walk type");
2288     return invoke_advanced_heap_root_callback(kind, obj);
2289   }
2290 }
2291 
2292 
// invoke the primitive array values callback (advanced walk only)
// Returns false only if the callback returned JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
  assert(obj->is_typeArray(), "not a primitive array");

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->array_primitive_value_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // invoke the callback
  int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
                                                  &wrapper,
                                                  obj,
                                                  (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}
2321 
// invoke the string value callback (advanced walk only)
// Returns false only if the callback returned JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::report_string_value(oop str) {
  assert(str->klass() == SystemDictionary::String_klass(), "not a string");

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->string_primitive_value_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(str, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), str);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // invoke the callback
  int res = invoke_string_value_callback(context->string_primitive_value_callback(),
                                         &wrapper,
                                         str,
                                         (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}
2350 
// invoke the primitive field callback (advanced walk only)
// ref_kind distinguishes instance vs static fields; addr points at the raw
// field value and type is its JVM type signature character.
// Returns false only if the callback returned JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
                                                    oop obj,
                                                    jint index,
                                                    address addr,
                                                    char type)
{
  // for primitive fields only the index will be set
  // NOTE(review): function-local static mutable state - presumably safe
  // because heap walks execute serially in the VM thread; confirm before
  // reusing this path concurrently.
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->primitive_field_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // the field index in the referrer
  reference_info.field.index = index;

  // map the type
  jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

  // setup the jvalue
  jvalue value;
  copy_to_jvalue(&value, addr, value_type);

  jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  wrapper.obj_tag_p(),
                  value,
                  value_type,
                  (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}
2398 
2399 
2400 // instance field
2401 inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
2402                                                              jint index,
2403                                                              address value,
2404                                                              char type) {
2405   return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
2406                                 obj,
2407                                 index,
2408                                 value,
2409                                 type);
2410 }
2411 
2412 // static field
2413 inline bool CallbackInvoker::report_primitive_static_field(oop obj,
2414                                                            jint index,
2415                                                            address value,
2416                                                            char type) {
2417   return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
2418                                 obj,
2419                                 index,
2420                                 value,
2421                                 type);
2422 }
2423 
// report a JNI local (root object) to the profiler
// JNI locals have no slot or bci, so -1 is passed for both.
inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
  if (is_basic_heap_walk()) {
    return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
                                           thread_tag,
                                           depth,
                                           m,
                                           -1,
                                           obj);
  } else {
    return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
                                              thread_tag, tid,
                                              depth,
                                              m,
                                              (jlocation)-1,
                                              -1,
                                              obj);
  }
}
2443 
2444 
2445 // report a local (stack reference, root object)
2446 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
2447                                                    jlong tid,
2448                                                    jint depth,
2449                                                    jmethodID method,
2450                                                    jlocation bci,
2451                                                    jint slot,
2452                                                    oop obj) {
2453   if (is_basic_heap_walk()) {
2454     return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
2455                                            thread_tag,
2456                                            depth,
2457                                            method,
2458                                            slot,
2459                                            obj);
2460   } else {
2461     return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
2462                                               thread_tag,
2463                                               tid,
2464                                               depth,
2465                                               method,
2466                                               bci,
2467                                               slot,
2468                                               obj);
2469   }
2470 }
2471 
2472 // report an object referencing a class.
2473 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
2474   if (is_basic_heap_walk()) {
2475     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2476   } else {
2477     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
2478   }
2479 }
2480 
2481 // report a class referencing its class loader.
2482 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
2483   if (is_basic_heap_walk()) {
2484     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2485   } else {
2486     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2487   }
2488 }
2489 
2490 // report a class referencing its signers.
2491 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
2492   if (is_basic_heap_walk()) {
2493     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
2494   } else {
2495     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
2496   }
2497 }
2498 
2499 // report a class referencing its protection domain..
2500 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
2501   if (is_basic_heap_walk()) {
2502     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2503   } else {
2504     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2505   }
2506 }
2507 
2508 // report a class referencing its superclass.
2509 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
2510   if (is_basic_heap_walk()) {
2511     // Send this to be consistent with past implementation
2512     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2513   } else {
2514     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
2515   }
2516 }
2517 
2518 // report a class referencing one of its interfaces.
2519 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
2520   if (is_basic_heap_walk()) {
2521     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
2522   } else {
2523     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
2524   }
2525 }
2526 
2527 // report a class referencing one of its static fields.
2528 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
2529   if (is_basic_heap_walk()) {
2530     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2531   } else {
2532     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2533   }
2534 }
2535 
2536 // report an array referencing an element object
2537 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
2538   if (is_basic_heap_walk()) {
2539     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2540   } else {
2541     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2542   }
2543 }
2544 
2545 // report an object referencing an instance field object
2546 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
2547   if (is_basic_heap_walk()) {
2548     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
2549   } else {
2550     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
2551   }
2552 }
2553 
2554 // report an array referencing an element object
2555 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
2556   if (is_basic_heap_walk()) {
2557     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2558   } else {
2559     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2560   }
2561 }
2562 
2563 // A supporting closure used to process simple roots
2564 class SimpleRootsClosure : public OopClosure {
2565  private:
2566   jvmtiHeapReferenceKind _kind;
2567   bool _continue;
2568 
2569   jvmtiHeapReferenceKind root_kind()    { return _kind; }
2570 
2571  public:
2572   void set_kind(jvmtiHeapReferenceKind kind) {
2573     _kind = kind;
2574     _continue = true;
2575   }
2576 
2577   inline bool stopped() {
2578     return !_continue;
2579   }
2580 
2581   void do_oop(oop* obj_p) {
2582     // iteration has terminated
2583     if (stopped()) {
2584       return;
2585     }
2586 
2587     oop o = *obj_p;
2588     // ignore null
2589     if (o == NULL) {
2590       return;
2591     }
2592 
2593     assert(Universe::heap()->is_in_reserved(o), "should be impossible");
2594 
2595     jvmtiHeapReferenceKind kind = root_kind();
2596     if (kind == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
2597       // SystemDictionary::always_strong_oops_do reports the application
2598       // class loader as a root. We want this root to be reported as
2599       // a root kind of "OTHER" rather than "SYSTEM_CLASS".
2600       if (!o->is_instance() || !InstanceKlass::cast(o->klass())->is_mirror_instance_klass()) {
2601         kind = JVMTI_HEAP_REFERENCE_OTHER;
2602       }
2603     }
2604 
2605     // some objects are ignored - in the case of simple
2606     // roots it's mostly Symbol*s that we are skipping
2607     // here.
2608     if (!ServiceUtil::visible_oop(o)) {
2609       return;
2610     }
2611 
2612     // invoke the callback
2613     _continue = CallbackInvoker::report_simple_root(kind, o);
2614 
2615   }
2616   virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2617 };
2618 
// A supporting closure used to process JNI locals
class JNILocalRootsClosure : public OopClosure {
 private:
  jlong _thread_tag;   // tag of the thread that owns the JNI locals
  jlong _tid;          // thread id of that thread
  jint _depth;         // stack depth of the frame the locals belong to
  jmethodID _method;   // method executing in that frame (NULL when there is no frame)
  bool _continue;      // cleared when a callback asks to abort the walk
 public:
  // (re)arm the closure with the context reported for each JNI local
  void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
    _thread_tag = thread_tag;
    _tid = tid;
    _depth = depth;
    _method = method;
    _continue = true;
  }

  // true when a previous callback terminated the iteration
  inline bool stopped() {
    return !_continue;
  }

  void do_oop(oop* obj_p) {
    // iteration has terminated
    if (stopped()) {
      return;
    }

    oop o = *obj_p;
    // ignore null
    if (o == NULL) {
      return;
    }

    // ignore objects that are not visible to profilers
    if (!ServiceUtil::visible_oop(o)) {
      return;
    }

    // invoke the callback; it decides whether the walk continues
    _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
  }
  virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
};
2661 
2662 
// A VM operation to iterate over objects that are reachable from
// a set of roots or an initial object.
//
// For VM_HeapWalkOperation the set of roots used is :-
//
// - All JNI global references
// - All inflated monitors
// - All classes loaded by the boot class loader (or all classes
//     in the event that class unloading is disabled)
// - All java threads
// - For each java thread then all locals and JNI local references
//      on the thread's execution stack
// - All visible/explainable objects from Universe::oops_do
//
class VM_HeapWalkOperation: public VM_Operation {
 private:
  enum {
    initial_visit_stack_size = 4000   // initial capacity of the visit stack
  };

  bool _is_advanced_heap_walk;                      // indicates FollowReferences
  JvmtiTagMap* _tag_map;
  Handle _initial_object;                           // null Handle => walk from the heap roots
  GrowableArray<oop>* _visit_stack;                 // the visit stack

  bool _collecting_heap_roots;                      // are we collecting roots
                                                    // NOTE(review): not initialized by the
                                                    // constructors in this file - confirm use
  bool _following_object_refs;                      // are we following object references

  bool _reporting_primitive_fields;                 // optional reporting
  bool _reporting_primitive_array_values;
  bool _reporting_string_values;

  // allocate the C-heap backed stack that holds objects pending a visit
  GrowableArray<oop>* create_visit_stack() {
    return new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(initial_visit_stack_size, true);
  }

  // accessors
  bool is_advanced_heap_walk() const               { return _is_advanced_heap_walk; }
  JvmtiTagMap* tag_map() const                     { return _tag_map; }
  Handle initial_object() const                    { return _initial_object; }

  bool is_following_references() const             { return _following_object_refs; }

  bool is_reporting_primitive_fields()  const      { return _reporting_primitive_fields; }
  bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
  bool is_reporting_string_values() const          { return _reporting_string_values; }

  GrowableArray<oop>* visit_stack() const          { return _visit_stack; }

  // iterate over the various object types
  inline bool iterate_over_array(oop o);
  inline bool iterate_over_type_array(oop o);
  inline bool iterate_over_class(oop o);
  inline bool iterate_over_object(oop o);

  // root collection
  inline bool collect_simple_roots();
  inline bool collect_stack_roots();
  inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);

  // visit an object
  inline bool visit(oop o);

 public:
  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
                       Handle initial_object,
                       BasicHeapWalkContext callbacks,
                       const void* user_data);

  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
                       Handle initial_object,
                       AdvancedHeapWalkContext callbacks,
                       const void* user_data);

  ~VM_HeapWalkOperation();

  VMOp_Type type() const { return VMOp_HeapWalkOperation; }
  void doit();
};
2742 
2743 
2744 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2745                                            Handle initial_object,
2746                                            BasicHeapWalkContext callbacks,
2747                                            const void* user_data) {
2748   _is_advanced_heap_walk = false;
2749   _tag_map = tag_map;
2750   _initial_object = initial_object;
2751   _following_object_refs = (callbacks.object_ref_callback() != NULL);
2752   _reporting_primitive_fields = false;
2753   _reporting_primitive_array_values = false;
2754   _reporting_string_values = false;
2755   _visit_stack = create_visit_stack();
2756 
2757 
2758   CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2759 }
2760 
2761 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2762                                            Handle initial_object,
2763                                            AdvancedHeapWalkContext callbacks,
2764                                            const void* user_data) {
2765   _is_advanced_heap_walk = true;
2766   _tag_map = tag_map;
2767   _initial_object = initial_object;
2768   _following_object_refs = true;
2769   _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);;
2770   _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);;
2771   _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);;
2772   _visit_stack = create_visit_stack();
2773 
2774   CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2775 }
2776 
2777 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
2778   if (_following_object_refs) {
2779     assert(_visit_stack != NULL, "checking");
2780     delete _visit_stack;
2781     _visit_stack = NULL;
2782   }
2783 }
2784 
2785 // an array references its class and has a reference to
2786 // each element in the array
2787 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
2788   objArrayOop array = objArrayOop(o);
2789 
2790   // array reference to its class
2791   oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror();
2792   if (!CallbackInvoker::report_class_reference(o, mirror)) {
2793     return false;
2794   }
2795 
2796   // iterate over the array and report each reference to a
2797   // non-null element
2798   for (int index=0; index<array->length(); index++) {
2799     oop elem = array->obj_at(index);
2800     if (elem == NULL) {
2801       continue;
2802     }
2803 
2804     // report the array reference o[index] = elem
2805     if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
2806       return false;
2807     }
2808   }
2809   return true;
2810 }
2811 
2812 // a type array references its class
2813 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
2814   Klass* k = o->klass();
2815   oop mirror = k->java_mirror();
2816   if (!CallbackInvoker::report_class_reference(o, mirror)) {
2817     return false;
2818   }
2819 
2820   // report the array contents if required
2821   if (is_reporting_primitive_array_values()) {
2822     if (!CallbackInvoker::report_primitive_array_values(o)) {
2823       return false;
2824     }
2825   }
2826   return true;
2827 }
2828 
#ifdef ASSERT
// verify that a static oop field lies within the static field area of
// the given mirror. The ik parameter is unused but retained so the
// caller's signature stays unchanged.
// (fixed: collapsed the if/else that returned the comparison result)
static inline bool verify_static_oop(InstanceKlass* ik,
                                     oop mirror, int offset) {
  address obj_p = (address)mirror + offset;
  address start = (address)InstanceMirrorKlass::start_of_static_fields(mirror);
  address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
  assert(end >= start, "sanity check");

  return obj_p >= start && obj_p < end;
}
#endif // #ifdef ASSERT
2845 
2846 // a class references its super class, interfaces, class loader, ...
2847 // and finally its static fields
2848 inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
2849   int i;
2850   Klass* klass = java_lang_Class::as_Klass(java_class);
2851 
2852   if (klass->is_instance_klass()) {
2853     InstanceKlass* ik = InstanceKlass::cast(klass);
2854 
2855     // Ignore the class if it hasn't been initialized yet
2856     if (!ik->is_linked()) {
2857       return true;
2858     }
2859 
2860     // get the java mirror
2861     oop mirror = klass->java_mirror();
2862 
2863     // super (only if something more interesting than java.lang.Object)
2864     Klass* java_super = ik->java_super();
2865     if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
2866       oop super = java_super->java_mirror();
2867       if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
2868         return false;
2869       }
2870     }
2871 
2872     // class loader
2873     oop cl = ik->class_loader();
2874     if (cl != NULL) {
2875       if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
2876         return false;
2877       }
2878     }
2879 
2880     // protection domain
2881     oop pd = ik->protection_domain();
2882     if (pd != NULL) {
2883       if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
2884         return false;
2885       }
2886     }
2887 
2888     // signers
2889     oop signers = ik->signers();
2890     if (signers != NULL) {
2891       if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
2892         return false;
2893       }
2894     }
2895 
2896     // references from the constant pool
2897     {
2898       ConstantPool* pool = ik->constants();
2899       for (int i = 1; i < pool->length(); i++) {
2900         constantTag tag = pool->tag_at(i).value();
2901         if (tag.is_string() || tag.is_klass()) {
2902           oop entry;
2903           if (tag.is_string()) {
2904             entry = pool->resolved_string_at(i);
2905             // If the entry is non-null it is resolved.
2906             if (entry == NULL) continue;
2907           } else {
2908             entry = pool->resolved_klass_at(i)->java_mirror();
2909           }
2910           if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
2911             return false;
2912           }
2913         }
2914       }
2915     }
2916 
2917     // interfaces
2918     // (These will already have been reported as references from the constant pool
2919     //  but are specified by IterateOverReachableObjects and must be reported).
2920     Array<Klass*>* interfaces = ik->local_interfaces();
2921     for (i = 0; i < interfaces->length(); i++) {
2922       oop interf = ((Klass*)interfaces->at(i))->java_mirror();
2923       if (interf == NULL) {
2924         continue;
2925       }
2926       if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
2927         return false;
2928       }
2929     }
2930 
2931     // iterate over the static fields
2932 
2933     ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
2934     for (i=0; i<field_map->field_count(); i++) {
2935       ClassFieldDescriptor* field = field_map->field_at(i);
2936       char type = field->field_type();
2937       if (!is_primitive_field_type(type)) {
2938         oop fld_o = mirror->obj_field(field->field_offset());
2939         assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
2940         if (fld_o != NULL) {
2941           int slot = field->field_index();
2942           if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
2943             delete field_map;
2944             return false;
2945           }
2946         }
2947       } else {
2948          if (is_reporting_primitive_fields()) {
2949            address addr = (address)mirror + field->field_offset();
2950            int slot = field->field_index();
2951            if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
2952              delete field_map;
2953              return false;
2954           }
2955         }
2956       }
2957     }
2958     delete field_map;
2959 
2960     return true;
2961   }
2962 
2963   return true;
2964 }
2965 
// an object references a class and its instance fields
// (static fields are ignored here as we report these as
// references from the class).
// Returns false if a callback asked to terminate the walk.
inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
  // reference to the class
  if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
    return false;
  }

  // iterate over instance fields (field_map is cached per class,
  // so it is not deleted here)
  ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      oop fld_o = o->obj_field(field->field_offset());
      // ignore any objects that aren't visible to profiler
      if (fld_o != NULL && ServiceUtil::visible_oop(fld_o)) {
        assert(Universe::heap()->is_in_reserved(fld_o), "unsafe code should not "
               "have references to Klass* anymore");
        int slot = field->field_index();
        if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
          return false;
        }
      }
    } else {
      if (is_reporting_primitive_fields()) {
        // primitive instance field - pass the address of the value
        address addr = (address)o + field->field_offset();
        int slot = field->field_index();
        if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
          return false;
        }
      }
    }
  }

  // if the object is a java.lang.String then optionally report its value
  if (is_reporting_string_values() &&
      o->klass() == SystemDictionary::String_klass()) {
    if (!CallbackInvoker::report_string_value(o)) {
      return false;
    }
  }
  return true;
}
3012 
3013 
3014 // Collects all simple (non-stack) roots except for threads;
3015 // threads are handled in collect_stack_roots() as an optimization.
3016 // if there's a heap root callback provided then the callback is
3017 // invoked for each simple root.
3018 // if an object reference callback is provided then all simple
3019 // roots are pushed onto the marking stack so that they can be
3020 // processed later
3021 //
3022 inline bool VM_HeapWalkOperation::collect_simple_roots() {
3023   SimpleRootsClosure blk;
3024 
3025   // JNI globals
3026   blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
3027   JNIHandles::oops_do(&blk);
3028   if (blk.stopped()) {
3029     return false;
3030   }
3031 
3032   // Preloaded classes and loader from the system dictionary
3033   blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
3034   SystemDictionary::always_strong_oops_do(&blk);
3035   ClassLoaderDataGraph::always_strong_oops_do(&blk, false);
3036   if (blk.stopped()) {
3037     return false;
3038   }
3039 
3040   // Inflated monitors
3041   blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
3042   ObjectSynchronizer::oops_do(&blk);
3043   if (blk.stopped()) {
3044     return false;
3045   }
3046 
3047   // threads are now handled in collect_stack_roots()
3048 
3049   // Other kinds of roots maintained by HotSpot
3050   // Many of these won't be visible but others (such as instances of important
3051   // exceptions) will be visible.
3052   blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3053   Universe::oops_do(&blk);
3054 
3055   // If there are any non-perm roots in the code cache, visit them.
3056   blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3057   CodeBlobToOopClosure look_in_blobs(&blk, !CodeBlobToOopClosure::FixRelocations);
3058   CodeCache::scavenge_root_nmethods_do(&look_in_blobs);
3059 
3060   return true;
3061 }
3062 
// Walk the stack of a given thread and find all references (locals
// and JNI calls) and report these as stack references.
// Returns false as soon as a callback terminates the iteration.
inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
                                                      JNILocalRootsClosure* blk)
{
  oop threadObj = java_thread->threadObj();
  assert(threadObj != NULL, "sanity check");

  // only need to get the thread's tag once per thread
  jlong thread_tag = tag_for(_tag_map, threadObj);

  // also need the thread id
  jlong tid = java_lang_Thread::thread_id(threadObj);


  if (java_thread->has_last_Java_frame()) {

    // vframes are resource allocated
    Thread* current_thread = Thread::current();
    ResourceMark rm(current_thread);
    HandleMark hm(current_thread);

    RegisterMap reg_map(java_thread);
    frame f = java_thread->last_frame();
    vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);

    bool is_top_frame = true;
    int depth = 0;
    // remembers an entry frame so its JNI locals can be reported when the
    // corresponding javaVFrame is reached (see the native-method branch)
    frame* last_entry_frame = NULL;

    while (vf != NULL) {
      if (vf->is_java_frame()) {

        // java frame (interpreted, compiled, ...)
        javaVFrame *jvf = javaVFrame::cast(vf);

        // the jmethodID
        jmethodID method = jvf->method()->jmethod_id();

        if (!(jvf->method()->is_native())) {
          jlocation bci = (jlocation)jvf->bci();
          // report each object-typed local variable as a stack reference
          StackValueCollection* locals = jvf->locals();
          for (int slot=0; slot<locals->size(); slot++) {
            if (locals->at(slot)->type() == T_OBJECT) {
              oop o = locals->obj_at(slot)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                   bci, slot, o)) {
                return false;
              }
            }
          }

          // report each object on the expression stack; slots continue
          // after the local variable slots
          StackValueCollection* exprs = jvf->expressions();
          for (int index=0; index < exprs->size(); index++) {
            if (exprs->at(index)->type() == T_OBJECT) {
              oop o = exprs->obj_at(index)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                   bci, locals->size() + index, o)) {
                return false;
              }
            }
          }

          // Follow oops from compiled nmethod
          if (jvf->cb() != NULL && jvf->cb()->is_nmethod()) {
            blk->set_context(thread_tag, tid, depth, method);
            jvf->cb()->as_nmethod()->oops_do(blk);
          }
        } else {
          // native method: report JNI locals instead of bytecode locals
          blk->set_context(thread_tag, tid, depth, method);
          if (is_top_frame) {
            // JNI locals for the top frame.
            java_thread->active_handles()->oops_do(blk);
          } else {
            if (last_entry_frame != NULL) {
              // JNI locals for the entry frame
              assert(last_entry_frame->is_entry_frame(), "checking");
              last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
            }
          }
        }
        last_entry_frame = NULL;
        depth++;
      } else {
        // externalVFrame - for an entry frame then we report the JNI locals
        // when we find the corresponding javaVFrame
        frame* fr = vf->frame_pointer();
        assert(fr != NULL, "sanity check");
        if (fr->is_entry_frame()) {
          last_entry_frame = fr;
        }
      }

      vf = vf->sender();
      is_top_frame = false;
    }
  } else {
    // no last java frame but there may be JNI locals
    blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
    java_thread->active_handles()->oops_do(blk);
  }
  return true;
}
3176 
3177 
3178 // Collects the simple roots for all threads and collects all
3179 // stack roots - for each thread it walks the execution
3180 // stack to find all references and local JNI refs.
3181 inline bool VM_HeapWalkOperation::collect_stack_roots() {
3182   JNILocalRootsClosure blk;
3183   for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
3184     oop threadObj = thread->threadObj();
3185     if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
3186       // Collect the simple root for this thread before we
3187       // collect its stack roots
3188       if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
3189                                                threadObj)) {
3190         return false;
3191       }
3192       if (!collect_stack_roots(thread, &blk)) {
3193         return false;
3194       }
3195     }
3196   }
3197   return true;
3198 }
3199 
3200 // visit an object
3201 // first mark the object as visited
3202 // second get all the outbound references from this object (in other words, all
3203 // the objects referenced by this object).
3204 //
3205 bool VM_HeapWalkOperation::visit(oop o) {
3206   // mark object as visited
3207   assert(!ObjectMarker::visited(o), "can't visit same object more than once");
3208   ObjectMarker::mark(o);
3209 
3210   // instance
3211   if (o->is_instance()) {
3212     if (o->klass() == SystemDictionary::Class_klass()) {
3213       if (!java_lang_Class::is_primitive(o)) {
3214         // a java.lang.Class
3215         return iterate_over_class(o);
3216       }
3217     } else {
3218       return iterate_over_object(o);
3219     }
3220   }
3221 
3222   // object array
3223   if (o->is_objArray()) {
3224     return iterate_over_array(o);
3225   }
3226 
3227   // type array
3228   if (o->is_typeArray()) {
3229     return iterate_over_type_array(o);
3230   }
3231 
3232   return true;
3233 }
3234 
// the VM operation: collect the roots (or push the initial object) and
// then transitively visit everything reachable, stopping early if a
// callback asks to terminate.
void VM_HeapWalkOperation::doit() {
  ResourceMark rm;
  ObjectMarkerController marker;
  ClassFieldMapCacheMark cm;

  assert(visit_stack()->is_empty(), "visit stack must be empty");

  // the heap walk starts with an initial object or the heap roots
  if (initial_object().is_null()) {
    // If either collect_stack_roots() or collect_simple_roots()
    // returns false at this point, then there are no mark bits
    // to reset.
    ObjectMarker::set_needs_reset(false);

    // Calling collect_stack_roots() before collect_simple_roots()
    // can result in a big performance boost for an agent that is
    // focused on analyzing references in the thread stacks.
    if (!collect_stack_roots()) return;

    if (!collect_simple_roots()) return;

    // no early return so enable heap traversal to reset the mark bits
    ObjectMarker::set_needs_reset(true);
  } else {
    visit_stack()->push(initial_object()());
  }

  // object references required
  if (is_following_references()) {

    // visit each object until all reachable objects have been
    // visited or the callback asked to terminate the iteration.
    while (!visit_stack()->is_empty()) {
      oop o = visit_stack()->pop();
      if (!ObjectMarker::visited(o)) {
        if (!visit(o)) {
          break;
        }
      }
    }
  }
}
3277 
3278 // iterate over all objects that are reachable from a set of roots
3279 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
3280                                                  jvmtiStackReferenceCallback stack_ref_callback,
3281                                                  jvmtiObjectReferenceCallback object_ref_callback,
3282                                                  const void* user_data) {
3283   MutexLocker ml(Heap_lock);
3284   BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
3285   VM_HeapWalkOperation op(this, Handle(), context, user_data);
3286   VMThread::execute(&op);
3287 }
3288 
3289 // iterate over all objects that are reachable from a given object
3290 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
3291                                                              jvmtiObjectReferenceCallback object_ref_callback,
3292                                                              const void* user_data) {
3293   oop obj = JNIHandles::resolve(object);
3294   Handle initial_object(Thread::current(), obj);
3295 
3296   MutexLocker ml(Heap_lock);
3297   BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
3298   VM_HeapWalkOperation op(this, initial_object, context, user_data);
3299   VMThread::execute(&op);
3300 }
3301 
3302 // follow references from an initial object or the GC roots
3303 void JvmtiTagMap::follow_references(jint heap_filter,
3304                                     Klass* klass,
3305                                     jobject object,
3306                                     const jvmtiHeapCallbacks* callbacks,
3307                                     const void* user_data)
3308 {
3309   oop obj = JNIHandles::resolve(object);
3310   Handle initial_object(Thread::current(), obj);
3311 
3312   MutexLocker ml(Heap_lock);
3313   AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
3314   VM_HeapWalkOperation op(this, initial_object, context, user_data);
3315   VMThread::execute(&op);
3316 }
3317 
3318 
3319 void JvmtiTagMap::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
3320   // No locks during VM bring-up (0 threads) and no safepoints after main
3321   // thread creation and before VMThread creation (1 thread); initial GC
3322   // verification can happen in that window which gets to here.
3323   assert(Threads::number_of_threads() <= 1 ||
3324          SafepointSynchronize::is_at_safepoint(),
3325          "must be executed at a safepoint");
3326   if (JvmtiEnv::environments_might_exist()) {
3327     JvmtiEnvIterator it;
3328     for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
3329       JvmtiTagMap* tag_map = env->tag_map();
3330       if (tag_map != NULL && !tag_map->is_empty()) {
3331         tag_map->do_weak_oops(is_alive, f);
3332       }
3333     }
3334   }
3335 }
3336 
// Weak-oop processing for this environment's tag hashmap. Entries whose
// tagged object is no longer alive are removed (posting an ObjectFree event
// if the environment has it enabled); entries whose object was relocated by
// the collector are re-hashed into their new bucket.
void JvmtiTagMap::do_weak_oops(BoolObjectClosure* is_alive, OopClosure* f) {

  // does this environment have the OBJECT_FREE event enabled
  bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);

  // counters used for trace message
  int freed = 0;
  int moved = 0;

  JvmtiTagHashmap* hashmap = this->hashmap();

  // reenable sizing (if disabled)
  hashmap->set_resizing_enabled(true);

  // if the hashmap is empty then we can skip it
  if (hashmap->_entry_count == 0) {
    return;
  }

  // now iterate through each entry in the table

  JvmtiTagHashmapEntry** table = hashmap->table();
  int size = hashmap->size();

  // Entries that re-hash to a bucket we have not scanned yet cannot be
  // inserted immediately (we would encounter them a second time during this
  // pass); they are chained on this list and re-added after the main loop.
  JvmtiTagHashmapEntry* delayed_add = NULL;

  for (int pos = 0; pos < size; ++pos) {
    JvmtiTagHashmapEntry* entry = table[pos];
    JvmtiTagHashmapEntry* prev = NULL;   // predecessor of entry in this bucket's chain

    while (entry != NULL) {
      // save the successor now: entry may be unlinked or re-chained below
      JvmtiTagHashmapEntry* next = entry->next();

      // has object been GC'ed
      if (!is_alive->do_object_b(entry->object_peek())) {
        // grab the tag
        jlong tag = entry->tag();
        guarantee(tag != 0, "checking");

        // remove GC'ed entry from hashmap and return the
        // entry to the free list
        hashmap->remove(prev, pos, entry);
        destroy_entry(entry);

        // post the event to the profiler
        if (post_object_free) {
          JvmtiExport::post_object_free(env(), tag);
        }

        ++freed;
      } else {
        // object is alive: let the closure update our reference to it,
        // then re-read the (possibly new) oop value
        f->do_oop(entry->object_addr());
        oop new_oop = entry->object_peek();

        // if the object has moved then re-hash it and move its
        // entry to its new location.
        unsigned int new_pos = JvmtiTagHashmap::hash(new_oop, size);
        if (new_pos != (unsigned int)pos) {
          // unlink from the current bucket (prev stays put: the next entry
          // examined has the same predecessor)
          if (prev == NULL) {
            table[pos] = next;
          } else {
            prev->set_next(next);
          }
          if (new_pos < (unsigned int)pos) {
            // new bucket was already scanned, so direct insertion is safe
            entry->set_next(table[new_pos]);
            table[new_pos] = entry;
          } else {
            // Delay adding this entry to it's new position as we'd end up
            // hitting it again during this iteration.
            entry->set_next(delayed_add);
            delayed_add = entry;
          }
          moved++;
        } else {
          // object didn't move
          prev = entry;
        }
      }

      entry = next;
    }
  }

  // Re-add all the entries which were kept aside
  while (delayed_add != NULL) {
    JvmtiTagHashmapEntry* next = delayed_add->next();
    unsigned int pos = JvmtiTagHashmap::hash(delayed_add->object_peek(), size);
    delayed_add->set_next(table[pos]);
    table[pos] = delayed_add;
    delayed_add = next;
  }

  // trace message: entry count before -> after, plus freed/moved totals
  log_debug(jvmti, objecttagging)("(%d->%d, %d freed, %d total moves)",
                                  hashmap->_entry_count + freed, hashmap->_entry_count, freed, moved);
}