1 /*
   2  * Copyright (c) 2003, 2018, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "classfile/javaClasses.inline.hpp"
  27 #include "classfile/symbolTable.hpp"
  28 #include "classfile/systemDictionary.hpp"
  29 #include "classfile/vmSymbols.hpp"
  30 #include "code/codeCache.hpp"
  31 #include "jvmtifiles/jvmtiEnv.hpp"
  32 #include "memory/allocation.inline.hpp"
  33 #include "memory/resourceArea.hpp"
  34 #include "oops/access.inline.hpp"
  35 #include "oops/arrayOop.inline.hpp"
  36 #include "oops/constantPool.inline.hpp"
  37 #include "oops/instanceMirrorKlass.hpp"
  38 #include "oops/objArrayKlass.hpp"
  39 #include "oops/objArrayOop.inline.hpp"
  40 #include "oops/oop.inline.hpp"
  41 #include "oops/typeArrayOop.inline.hpp"
  42 #include "prims/jvmtiEventController.hpp"
  43 #include "prims/jvmtiEventController.inline.hpp"
  44 #include "prims/jvmtiExport.hpp"
  45 #include "prims/jvmtiImpl.hpp"
  46 #include "prims/jvmtiTagMap.hpp"
  47 #include "runtime/biasedLocking.hpp"
  48 #include "runtime/javaCalls.hpp"
  49 #include "runtime/jniHandles.inline.hpp"
  50 #include "runtime/mutex.hpp"
  51 #include "runtime/mutexLocker.hpp"
  52 #include "runtime/reflectionUtils.hpp"
  53 #include "runtime/thread.inline.hpp"
  54 #include "runtime/threadSMR.hpp"
  55 #include "runtime/vframe.hpp"
  56 #include "runtime/vmThread.hpp"
  57 #include "runtime/vm_operations.hpp"
  58 #include "services/serviceUtil.hpp"
  59 #include "utilities/macros.hpp"
  60 
  61 // JvmtiTagHashmapEntry
  62 //
  63 // Each entry encapsulates a reference to the tagged object
  64 // and the tag value. In addition an entry includes a next pointer which
  65 // is used to chain entries together.
  66 
  67 class JvmtiTagHashmapEntry : public CHeapObj<mtInternal> {
  68  private:
  69   friend class JvmtiTagMap;
  70 
  71   oop _object;                          // tagged object
  72   jlong _tag;                           // the tag
  73   JvmtiTagHashmapEntry* _next;          // next on the list
  74 
  75   inline void init(oop object, jlong tag) {
  76     _object = object;
  77     _tag = tag;
  78     _next = NULL;
  79   }
  80 
  81   // constructor
  82   JvmtiTagHashmapEntry(oop object, jlong tag) { init(object, tag); }
  83 
  84  public:
  85 
  86   // accessor methods
  87   inline oop* object_addr() { return &_object; }
  88   inline oop object()       { return RootAccess<ON_PHANTOM_OOP_REF>::oop_load(object_addr()); }
  89   // Peek at the object without keeping it alive. The returned object must be
  90   // kept alive using a normal access if it leaks out of a thread transition from VM.
  91   inline oop object_peek()  {
  92     return RootAccess<ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE>::oop_load(object_addr());
  93   }
  94   inline jlong tag() const  { return _tag; }
  95 
  96   inline void set_tag(jlong tag) {
  97     assert(tag != 0, "can't be zero");
  98     _tag = tag;
  99   }
 100 
 101   inline bool equals(oop object) {
 102     return object == object_peek();
 103   }
 104 
 105   inline JvmtiTagHashmapEntry* next() const        { return _next; }
 106   inline void set_next(JvmtiTagHashmapEntry* next) { _next = next; }
 107 };
 108 
 109 
 110 // JvmtiTagHashmap
 111 //
 112 // A hashmap is essentially a table of pointers to entries. Entries
 113 // are hashed to a location, or position in the table, and then
 114 // chained from that location. The "key" for hashing is the address of
 115 // the object, or oop. The "value" is the tag value.
 116 //
 117 // A hashmap maintains a count of the number of entries in the hashmap
 118 // and resizes if the number of entries exceeds a given threshold.
 119 // The threshold is specified as a percentage of the size - for
 120 // example a threshold of 0.75 will trigger the hashmap to resize
 121 // if the number of entries is >75% of table size.
 122 //
 123 // A hashmap provides functions for adding, removing, and finding
 124 // entries. It also provides a function to iterate over all entries
 125 // in the hashmap.
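     //
     // For illustration, a condensed sketch (not verbatim code from this file) of
     // how JvmtiTagMap drives the hashmap when tagging an object; see
     // JvmtiTagMap::set_tag() below for the actual logic:
     //
     //   JvmtiTagHashmapEntry* entry = hashmap->find(o);
     //   if (entry == NULL) {
     //     if (tag != 0) {
     //       hashmap->add(o, create_entry(o, tag));  // tag a previously untagged object
     //     }
     //   } else if (tag == 0) {
     //     destroy_entry(hashmap->remove(o));        // untag a tagged object
     //   } else {
     //     entry->set_tag(tag);                      // update an existing tag
     //   }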
 126 
 127 class JvmtiTagHashmap : public CHeapObj<mtInternal> {
 128  private:
 129   friend class JvmtiTagMap;
 130 
 131   enum {
 132     small_trace_threshold  = 10000,                  // threshold for tracing
 133     medium_trace_threshold = 100000,
 134     large_trace_threshold  = 1000000,
 135     initial_trace_threshold = small_trace_threshold
 136   };
 137 
 138   static int _sizes[];                  // array of possible hashmap sizes
 139   int _size;                            // actual size of the table
 140   int _size_index;                      // index into size table
 141 
 142   int _entry_count;                     // number of entries in the hashmap
 143 
 144   float _load_factor;                   // load factor as a % of the size
 145   int _resize_threshold;                // computed threshold to trigger resizing.
 146   bool _resizing_enabled;               // indicates if hashmap can resize
 147 
 148   int _trace_threshold;                 // threshold for trace messages
 149 
 150   JvmtiTagHashmapEntry** _table;        // the table of entries.
 151 
 152   // private accessors
 153   int resize_threshold() const                  { return _resize_threshold; }
 154   int trace_threshold() const                   { return _trace_threshold; }
 155 
 156   // initialize the hashmap
 157   void init(int size_index=0, float load_factor=4.0f) {
 158     int initial_size =  _sizes[size_index];
 159     _size_index = size_index;
 160     _size = initial_size;
 161     _entry_count = 0;
 162     _trace_threshold = initial_trace_threshold;
 163     _load_factor = load_factor;
 164     _resize_threshold = (int)(_load_factor * _size);
 165     _resizing_enabled = true;
 166     size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
 167     _table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
 168     if (_table == NULL) {
 169       vm_exit_out_of_memory(s, OOM_MALLOC_ERROR,
 170         "unable to allocate initial hashtable for jvmti object tags");
 171     }
 172     for (int i=0; i<initial_size; i++) {
 173       _table[i] = NULL;
 174     }
 175   }
 176 
 177   // hash a given key (oop) with the specified size
 178   static unsigned int hash(oop key, int size) {
 179     // shift right to get better distribution (as these bits will be zero
 180     // with aligned addresses)
 181     unsigned int addr = (unsigned int)(cast_from_oop<intptr_t>(key));
 182 #ifdef _LP64
 183     return (addr >> 3) % size;
 184 #else
 185     return (addr >> 2) % size;
 186 #endif
 187   }
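       // Worked example (hypothetical address, for illustration only): with _LP64
       // and a table of 4801 buckets, an oop at address 0xDEADBEE8 maps to bucket
       // (0xDEADBEE8 >> 3) % 4801, i.e. 0x1BD5B7DD % 4801; the low three bits are
       // dropped because they are always zero for 8-byte aligned oops.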
 188 
 189   // hash a given key (oop)
 190   unsigned int hash(oop key) {
 191     return hash(key, _size);
 192   }
 193 
 194   // resize the hashmap - allocates a larger table and re-hashes
 195   // all entries into the new table.
 196   void resize() {
 197     int new_size_index = _size_index+1;
 198     int new_size = _sizes[new_size_index];
 199     if (new_size < 0) {
 200       // hashmap already at maximum capacity
 201       return;
 202     }
 203 
 204     // allocate new table
 205     size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
 206     JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
 207     if (new_table == NULL) {
 208       warning("unable to allocate larger hashtable for jvmti object tags");
 209       set_resizing_enabled(false);
 210       return;
 211     }
 212 
 213     // initialize new table
 214     int i;
 215     for (i=0; i<new_size; i++) {
 216       new_table[i] = NULL;
 217     }
 218 
 219     // rehash all entries into the new table
 220     for (i=0; i<_size; i++) {
 221       JvmtiTagHashmapEntry* entry = _table[i];
 222       while (entry != NULL) {
 223         JvmtiTagHashmapEntry* next = entry->next();
 224         oop key = entry->object_peek();
 225         assert(key != NULL, "jni weak reference cleared!!");
 226         unsigned int h = hash(key, new_size);
 227         JvmtiTagHashmapEntry* anchor = new_table[h];
 228         if (anchor == NULL) {
 229           new_table[h] = entry;
 230           entry->set_next(NULL);
 231         } else {
 232           entry->set_next(anchor);
 233           new_table[h] = entry;
 234         }
 235         entry = next;
 236       }
 237     }
 238 
 239     // free old table and update settings.
 240     os::free((void*)_table);
 241     _table = new_table;
 242     _size_index = new_size_index;
 243     _size = new_size;
 244 
 245     // compute new resize threshold
 246     _resize_threshold = (int)(_load_factor * _size);
 247   }
 248 
 249 
 250   // internal remove function - remove an entry at a given position in the
 251   // table.
 252   inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
 253     assert(pos >= 0 && pos < _size, "out of range");
 254     if (prev == NULL) {
 255       _table[pos] = entry->next();
 256     } else {
 257       prev->set_next(entry->next());
 258     }
 259     assert(_entry_count > 0, "checking");
 260     _entry_count--;
 261   }
 262 
 263   // resizing switch
 264   bool is_resizing_enabled() const          { return _resizing_enabled; }
 265   void set_resizing_enabled(bool enable)    { _resizing_enabled = enable; }
 266 
 267   // debugging
 268   void print_memory_usage();
 269   void compute_next_trace_threshold();
 270 
 271  public:
 272 
 273   // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
 274   // The preferred size is rounded up to the nearest actual size (or capped at the largest size).
 275   JvmtiTagHashmap(int size, float load_factor=0.0f) {
 276     int i=0;
 277     while (_sizes[i] < size) {
 278       if (_sizes[i] < 0) {
 279         assert(i > 0, "sanity check");
 280         i--;
 281         break;
 282       }
 283       i++;
 284     }
 285 
 286     // if a load factor is specified then use it, otherwise use default
 287     if (load_factor > 0.01f) {
 288       init(i, load_factor);
 289     } else {
 290       init(i);
 291     }
 292   }
 293 
 294   // create a JvmtiTagHashmap with default settings
 295   JvmtiTagHashmap() {
 296     init();
 297   }
 298 
 299   // release table when JvmtiTagHashmap destroyed
 300   ~JvmtiTagHashmap() {
 301     if (_table != NULL) {
 302       os::free((void*)_table);
 303       _table = NULL;
 304     }
 305   }
 306 
 307   // accessors
 308   int size() const                              { return _size; }
 309   JvmtiTagHashmapEntry** table() const          { return _table; }
 310   int entry_count() const                       { return _entry_count; }
 311 
 312   // find an entry in the hashmap, returns NULL if not found.
 313   inline JvmtiTagHashmapEntry* find(oop key) {
 314     unsigned int h = hash(key);
 315     JvmtiTagHashmapEntry* entry = _table[h];
 316     while (entry != NULL) {
 317       if (entry->equals(key)) {
 318          return entry;
 319       }
 320       entry = entry->next();
 321     }
 322     return NULL;
 323   }
 324 
 325 
 326   // add a new entry to hashmap
 327   inline void add(oop key, JvmtiTagHashmapEntry* entry) {
 328     assert(key != NULL, "checking");
 329     assert(find(key) == NULL, "duplicate detected");
 330     unsigned int h = hash(key);
 331     JvmtiTagHashmapEntry* anchor = _table[h];
 332     if (anchor == NULL) {
 333       _table[h] = entry;
 334       entry->set_next(NULL);
 335     } else {
 336       entry->set_next(anchor);
 337       _table[h] = entry;
 338     }
 339 
 340     _entry_count++;
 341     if (log_is_enabled(Debug, jvmti, objecttagging) && entry_count() >= trace_threshold()) {
 342       print_memory_usage();
 343       compute_next_trace_threshold();
 344     }
 345 
 346     // if the number of entries exceeds the threshold then resize
 347     if (entry_count() > resize_threshold() && is_resizing_enabled()) {
 348       resize();
 349     }
 350   }
 351 
 352   // remove an entry with the given key.
 353   inline JvmtiTagHashmapEntry* remove(oop key) {
 354     unsigned int h = hash(key);
 355     JvmtiTagHashmapEntry* entry = _table[h];
 356     JvmtiTagHashmapEntry* prev = NULL;
 357     while (entry != NULL) {
 358       if (entry->equals(key)) {
 359         break;
 360       }
 361       prev = entry;
 362       entry = entry->next();
 363     }
 364     if (entry != NULL) {
 365       remove(prev, h, entry);
 366     }
 367     return entry;
 368   }
 369 
 370   // iterate over all entries in the hashmap
 371   void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
 372 };
 373 
 374 // possible hashmap sizes - odd primes that roughly double in size.
 375 // To avoid excessive resizing the odd primes from 4801-76831 and
 376 // 76831-307261 have been removed. The list must be terminated by -1.
 377 int JvmtiTagHashmap::_sizes[] =  { 4801, 76831, 307261, 614563, 1228891,
 378     2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 };
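     //
     // Worked example based on the defaults above: a map built with the default
     // constructor starts with _sizes[0] = 4801 buckets and a load factor of 4.0f,
     // so the first resize happens once more than 4.0 * 4801 = 19204 entries are
     // present; the table then grows to 76831 buckets and the next resize
     // threshold becomes 4.0 * 76831 = 307324 entries.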
 379 
 380 
 381 // A supporting class for iterating over all entries in the hashmap
 382 class JvmtiTagHashmapEntryClosure {
 383  public:
 384   virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0;
 385 };
 386 
 387 
 388 // iterate over all entries in the hashmap
 389 void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
 390   for (int i=0; i<_size; i++) {
 391     JvmtiTagHashmapEntry* entry = _table[i];
 392     JvmtiTagHashmapEntry* prev = NULL;
 393     while (entry != NULL) {
 394       // obtain the next entry before invoking do_entry - this is
 395       // necessary because do_entry may remove the entry from the
 396       // hashmap.
 397       JvmtiTagHashmapEntry* next = entry->next();
 398       closure->do_entry(entry);
 399       entry = next;
 400      }
 401   }
 402 }
 403 
 404 // debugging
 405 void JvmtiTagHashmap::print_memory_usage() {
 406   intptr_t p = (intptr_t)this;
 407   tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p);
 408 
 409   // table + entries in KB
 410   int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) +
 411     entry_count()*sizeof(JvmtiTagHashmapEntry))/K;
 412 
 413   int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K);
 414   tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]",
 415     entry_count(), hashmap_usage, weak_globals_usage);
 416 }
 417 
 418 // compute threshold for the next trace message
 419 void JvmtiTagHashmap::compute_next_trace_threshold() {
 420   _trace_threshold = entry_count();
 421   if (trace_threshold() < medium_trace_threshold) {
 422     _trace_threshold += small_trace_threshold;
 423   } else {
 424     if (trace_threshold() < large_trace_threshold) {
 425       _trace_threshold += medium_trace_threshold;
 426     } else {
 427       _trace_threshold += large_trace_threshold;
 428     }
 429   }
 430 }
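     //
     // Progression implied by the code above: the first memory usage message is
     // printed once the map reaches 10000 entries, then roughly every 10000
     // entries until 100000, every 100000 entries until 1000000, and every
     // 1000000 entries after that.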
 431 
 432 // create a JvmtiTagMap
 433 JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
 434   _env(env),
 435   _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false),
 436   _free_entries(NULL),
 437   _free_entries_count(0)
 438 {
 439   assert(JvmtiThreadState_lock->is_locked(), "sanity check");
 440   assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");
 441 
 442   _hashmap = new JvmtiTagHashmap();
 443 
 444   // finally add us to the environment
 445   ((JvmtiEnvBase *)env)->set_tag_map(this);
 446 }
 447 
 448 
 449 // destroy a JvmtiTagMap
 450 JvmtiTagMap::~JvmtiTagMap() {
 451 
 452   // no lock acquired as we assume the enclosing environment is
 453   // also being destroyed.
 454   ((JvmtiEnvBase *)_env)->set_tag_map(NULL);
 455 
 456   JvmtiTagHashmapEntry** table = _hashmap->table();
 457   for (int j = 0; j < _hashmap->size(); j++) {
 458     JvmtiTagHashmapEntry* entry = table[j];
 459     while (entry != NULL) {
 460       JvmtiTagHashmapEntry* next = entry->next();
 461       delete entry;
 462       entry = next;
 463     }
 464   }
 465 
 466   // finally destroy the hashmap
 467   delete _hashmap;
 468   _hashmap = NULL;
 469 
 470   // remove any entries on the free list
 471   JvmtiTagHashmapEntry* entry = _free_entries;
 472   while (entry != NULL) {
 473     JvmtiTagHashmapEntry* next = entry->next();
 474     delete entry;
 475     entry = next;
 476   }
 477   _free_entries = NULL;
 478 }
 479 
 480 // create a hashmap entry
 481 // - if there's an entry on the (per-environment) free list then this
 482 // is returned. Otherwise a new entry is allocated.
 483 JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(oop ref, jlong tag) {
 484   assert(Thread::current()->is_VM_thread() || is_locked(), "checking");
 485   JvmtiTagHashmapEntry* entry;
 486   if (_free_entries == NULL) {
 487     entry = new JvmtiTagHashmapEntry(ref, tag);
 488   } else {
 489     assert(_free_entries_count > 0, "mismatched _free_entries_count");
 490     _free_entries_count--;
 491     entry = _free_entries;
 492     _free_entries = entry->next();
 493     entry->init(ref, tag);
 494   }
 495   return entry;
 496 }
 497 
 498 // destroy an entry by returning it to the free list (or deleting it if the free list is full)
 499 void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
 500   assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
 501   // limit the size of the free list
 502   if (_free_entries_count >= max_free_entries) {
 503     delete entry;
 504   } else {
 505     entry->set_next(_free_entries);
 506     _free_entries = entry;
 507     _free_entries_count++;
 508   }
 509 }
 510 
 511 // returns the tag map for the given environment. If the tag map
 512 // doesn't exist then it is created.
 513 JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
 514   JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map();
 515   if (tag_map == NULL) {
 516     MutexLocker mu(JvmtiThreadState_lock);
 517     tag_map = ((JvmtiEnvBase*)env)->tag_map();
 518     if (tag_map == NULL) {
 519       tag_map = new JvmtiTagMap(env);
 520     }
 521   } else {
 522     CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
 523   }
 524   return tag_map;
 525 }
 526 
 527 // iterate over all entries in the tag map.
 528 void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
 529   hashmap()->entry_iterate(closure);
 530 }
 531 
 532 // returns true if the hashmaps are empty
 533 bool JvmtiTagMap::is_empty() {
 534   assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
 535   return hashmap()->entry_count() == 0;
 536 }
 537 
 538 
 539 // Return the tag value for an object, or 0 if the object is
 540 // not tagged
 541 //
 542 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
 543   JvmtiTagHashmapEntry* entry = tag_map->hashmap()->find(o);
 544   if (entry == NULL) {
 545     return 0;
 546   } else {
 547     return entry->tag();
 548   }
 549 }
 550 
 551 
 552 // A CallbackWrapper is a support class for querying and tagging an object
 553 // around a callback to a profiler. The constructor does pre-callback
 554 // work to get the tag value, klass tag value, ... and the destructor
 555 // does the post-callback work of tagging or untagging the object.
 556 //
 557 // {
 558 //   CallbackWrapper wrapper(tag_map, o);
 559 //
 560 //   (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
 561 //
 562 // } // wrapper goes out of scope here which results in the destructor
 563 //      checking to see if the object has been tagged, untagged, or the
 564 //      tag value has changed.
 565 //
 566 class CallbackWrapper : public StackObj {
 567  private:
 568   JvmtiTagMap* _tag_map;
 569   JvmtiTagHashmap* _hashmap;
 570   JvmtiTagHashmapEntry* _entry;
 571   oop _o;
 572   jlong _obj_size;
 573   jlong _obj_tag;
 574   jlong _klass_tag;
 575 
 576  protected:
 577   JvmtiTagMap* tag_map() const      { return _tag_map; }
 578 
 579   // invoked post-callback to tag, untag, or update the tag of an object
 580   void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
 581                                        JvmtiTagHashmapEntry* entry, jlong obj_tag);
 582  public:
 583   CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
 584     assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
 585            "MT unsafe or must be VM thread");
 586 
 587     // object to tag
 588     _o = o;
 589 
 590     // object size
 591     _obj_size = (jlong)_o->size() * wordSize;
 592 
 593     // record the context
 594     _tag_map = tag_map;
 595     _hashmap = tag_map->hashmap();
 596     _entry = _hashmap->find(_o);
 597 
 598     // get object tag
 599     _obj_tag = (_entry == NULL) ? 0 : _entry->tag();
 600 
 601     // get the class and the class's tag value
 602     assert(SystemDictionary::Class_klass()->is_mirror_instance_klass(), "Is not?");
 603 
 604     _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
 605   }
 606 
 607   ~CallbackWrapper() {
 608     post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
 609   }
 610 
 611   inline jlong* obj_tag_p()                     { return &_obj_tag; }
 612   inline jlong obj_size() const                 { return _obj_size; }
 613   inline jlong obj_tag() const                  { return _obj_tag; }
 614   inline jlong klass_tag() const                { return _klass_tag; }
 615 };
 616 
 617 
 618 
 619 // invoked post-callback to tag, untag, or update the tag of an object
 620 void inline CallbackWrapper::post_callback_tag_update(oop o,
 621                                                       JvmtiTagHashmap* hashmap,
 622                                                       JvmtiTagHashmapEntry* entry,
 623                                                       jlong obj_tag) {
 624   if (entry == NULL) {
 625     if (obj_tag != 0) {
 626       // callback has tagged the object
 627       assert(Thread::current()->is_VM_thread(), "must be VMThread");
 628       entry = tag_map()->create_entry(o, obj_tag);
 629       hashmap->add(o, entry);
 630     }
 631   } else {
 632     // object was previously tagged - the callback may have untagged
 633     // the object or changed the tag value
 634     if (obj_tag == 0) {
 635 
 636       JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
 637       assert(entry_removed == entry, "checking");
 638       tag_map()->destroy_entry(entry);
 639 
 640     } else {
 641       if (obj_tag != entry->tag()) {
 642          entry->set_tag(obj_tag);
 643       }
 644     }
 645   }
 646 }
 647 
 648 // An extended CallbackWrapper used when reporting an object reference
 649 // to the agent.
 650 //
 651 // {
 652 //   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
 653 //
 654 //   (*callback)(wrapper.klass_tag(),
 655 //               wrapper.obj_size(),
 656 //               wrapper.obj_tag_p(),
 657 //               wrapper.referrer_tag_p(), ...)
 658 //
 659 // } // wrapper goes out of scope here which results in the destructor
 660 //      checking to see if the referrer object has been tagged, untagged,
 661 //      or the tag value has changed.
 662 //
 663 class TwoOopCallbackWrapper : public CallbackWrapper {
 664  private:
 665   bool _is_reference_to_self;
 666   JvmtiTagHashmap* _referrer_hashmap;
 667   JvmtiTagHashmapEntry* _referrer_entry;
 668   oop _referrer;
 669   jlong _referrer_obj_tag;
 670   jlong _referrer_klass_tag;
 671   jlong* _referrer_tag_p;
 672 
 673   bool is_reference_to_self() const             { return _is_reference_to_self; }
 674 
 675  public:
 676   TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
 677     CallbackWrapper(tag_map, o)
 678   {
 679     // self reference needs to be handled in a special way
 680     _is_reference_to_self = (referrer == o);
 681 
 682     if (_is_reference_to_self) {
 683       _referrer_klass_tag = klass_tag();
 684       _referrer_tag_p = obj_tag_p();
 685     } else {
 686       _referrer = referrer;
 687       // record the context
 688       _referrer_hashmap = tag_map->hashmap();
 689       _referrer_entry = _referrer_hashmap->find(_referrer);
 690 
 691       // get object tag
 692       _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
 693       _referrer_tag_p = &_referrer_obj_tag;
 694 
 695       // get referrer class tag.
 696       _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
 697     }
 698   }
 699 
 700   ~TwoOopCallbackWrapper() {
 701     if (!is_reference_to_self()){
 702       post_callback_tag_update(_referrer,
 703                                _referrer_hashmap,
 704                                _referrer_entry,
 705                                _referrer_obj_tag);
 706     }
 707   }
 708 
 709   // address of referrer tag
 710   // (for a self reference this will return the same thing as obj_tag_p())
 711   inline jlong* referrer_tag_p()        { return _referrer_tag_p; }
 712 
 713   // referrer's class tag
 714   inline jlong referrer_klass_tag()     { return _referrer_klass_tag; }
 715 };
 716 
 717 // tag an object
 718 //
 719 // This function is performance critical. If many threads attempt to tag objects
 720 // around the same time then it's possible that the Mutex associated with the
 721 // tag map will be a hot lock.
 722 void JvmtiTagMap::set_tag(jobject object, jlong tag) {
 723   MutexLocker ml(lock());
 724 
 725   // resolve the object
 726   oop o = JNIHandles::resolve_non_null(object);
 727 
 728   // see if the object is already tagged
 729   JvmtiTagHashmap* hashmap = _hashmap;
 730   JvmtiTagHashmapEntry* entry = hashmap->find(o);
 731 
 732   // if the object is not already tagged then we tag it
 733   if (entry == NULL) {
 734     if (tag != 0) {
 735       entry = create_entry(o, tag);
 736       hashmap->add(o, entry);
 737     } else {
 738       // no-op
 739     }
 740   } else {
 741     // if the object is already tagged then we either update
 742     // the tag (if a new tag value has been provided)
 743     // or remove the object if the new tag value is 0.
 744     if (tag == 0) {
 745       hashmap->remove(o);
 746       destroy_entry(entry);
 747     } else {
 748       entry->set_tag(tag);
 749     }
 750   }
 751 }
 752 
 753 // get the tag for an object
 754 jlong JvmtiTagMap::get_tag(jobject object) {
 755   MutexLocker ml(lock());
 756 
 757   // resolve the object
 758   oop o = JNIHandles::resolve_non_null(object);
 759 
 760   return tag_for(this, o);
 761 }
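     // Agent-side view (illustrative sketch using the standard JVMTI interface,
     // not code from this file): set_tag() and get_tag() back the JVMTI SetTag
     // and GetTag functions, e.g.
     //
     //   jlong tag = 0;
     //   jvmti->SetTag(obj, 42);    // tag the object in this environment's tag map
     //   jvmti->GetTag(obj, &tag);  // tag is now 42
     //   jvmti->SetTag(obj, 0);     // remove the tag again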
 762 
 763 
 764 // Helper class used to describe the static or instance fields of a class.
 765 // For each field it holds the field index (as defined by the JVMTI specification),
 766 // the field type, and the offset.
 767 
 768 class ClassFieldDescriptor: public CHeapObj<mtInternal> {
 769  private:
 770   int _field_index;
 771   int _field_offset;
 772   char _field_type;
 773  public:
 774   ClassFieldDescriptor(int index, char type, int offset) :
 775     _field_index(index), _field_type(type), _field_offset(offset) {
 776   }
 777   int field_index()  const  { return _field_index; }
 778   char field_type()  const  { return _field_type; }
 779   int field_offset() const  { return _field_offset; }
 780 };
 781 
 782 class ClassFieldMap: public CHeapObj<mtInternal> {
 783  private:
 784   enum {
 785     initial_field_count = 5
 786   };
 787 
 788   // list of field descriptors
 789   GrowableArray<ClassFieldDescriptor*>* _fields;
 790 
 791   // constructor
 792   ClassFieldMap();
 793 
 794   // add a field
 795   void add(int index, char type, int offset);
 796 
 797   // returns the field count for the given class
 798   static int compute_field_count(InstanceKlass* ik);
 799 
 800  public:
 801   ~ClassFieldMap();
 802 
 803   // access
 804   int field_count()                     { return _fields->length(); }
 805   ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }
 806 
 807   // functions to create maps of static or instance fields
 808   static ClassFieldMap* create_map_of_static_fields(Klass* k);
 809   static ClassFieldMap* create_map_of_instance_fields(oop obj);
 810 };
 811 
 812 ClassFieldMap::ClassFieldMap() {
 813   _fields = new (ResourceObj::C_HEAP, mtInternal)
 814     GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
 815 }
 816 
 817 ClassFieldMap::~ClassFieldMap() {
 818   for (int i=0; i<_fields->length(); i++) {
 819     delete _fields->at(i);
 820   }
 821   delete _fields;
 822 }
 823 
 824 void ClassFieldMap::add(int index, char type, int offset) {
 825   ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
 826   _fields->append(field);
 827 }
 828 
 829 // Returns a heap allocated ClassFieldMap to describe the static fields
 830 // of the given class.
 831 //
 832 ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
 833   HandleMark hm;
 834   InstanceKlass* ik = InstanceKlass::cast(k);
 835 
 836   // create the field map
 837   ClassFieldMap* field_map = new ClassFieldMap();
 838 
 839   FilteredFieldStream f(ik, false, false);
 840   int max_field_index = f.field_count()-1;
 841 
 842   int index = 0;
 843   for (FilteredFieldStream fld(ik, true, true); !fld.eos(); fld.next(), index++) {
 844     // ignore instance fields
 845     if (!fld.access_flags().is_static()) {
 846       continue;
 847     }
 848     field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
 849   }
 850   return field_map;
 851 }
 852 
 853 // Returns a heap allocated ClassFieldMap to describe the instance fields
 854 // of the given class. All instance fields are included (this means public
 855 // and private fields declared in superclasses and superinterfaces too).
 856 //
 857 ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
 858   HandleMark hm;
 859   InstanceKlass* ik = InstanceKlass::cast(obj->klass());
 860 
 861   // create the field map
 862   ClassFieldMap* field_map = new ClassFieldMap();
 863 
 864   FilteredFieldStream f(ik, false, false);
 865 
 866   int max_field_index = f.field_count()-1;
 867 
 868   int index = 0;
 869   for (FilteredFieldStream fld(ik, false, false); !fld.eos(); fld.next(), index++) {
 870     // ignore static fields
 871     if (fld.access_flags().is_static()) {
 872       continue;
 873     }
 874     field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
 875   }
 876 
 877   return field_map;
 878 }
 879 
 880 // Helper class used to cache a ClassFieldMap for the instance fields of
 881 // a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
 882 // heap iteration to avoid creating a field map for each object in the heap
 883 // (the map only needs to be created when the first instance of a class is encountered).
 884 //
 885 class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
 886  private:
 887    enum {
 888      initial_class_count = 200
 889    };
 890   ClassFieldMap* _field_map;
 891 
 892   ClassFieldMap* field_map() const          { return _field_map; }
 893 
 894   JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
 895   ~JvmtiCachedClassFieldMap();
 896 
 897   static GrowableArray<InstanceKlass*>* _class_list;
 898   static void add_to_class_list(InstanceKlass* ik);
 899 
 900  public:
 901   // returns the field map for a given object (returning the map cached
 902   // by the InstanceKlass if possible)
 903   static ClassFieldMap* get_map_of_instance_fields(oop obj);
 904 
 905   // removes the field map from all instanceKlasses - should be
 906   // called before VM operation completes
 907   static void clear_cache();
 908 
 909   // returns the number of ClassFieldMaps cached by instanceKlasses
 910   static int cached_field_map_count();
 911 };
 912 
 913 GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
 914 
 915 JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
 916   _field_map = field_map;
 917 }
 918 
 919 JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
 920   if (_field_map != NULL) {
 921     delete _field_map;
 922   }
 923 }
 924 
 925 // Marker class to ensure that the class field map cache is only used in a defined
 926 // scope.
 927 class ClassFieldMapCacheMark : public StackObj {
 928  private:
 929    static bool _is_active;
 930  public:
 931    ClassFieldMapCacheMark() {
 932      assert(Thread::current()->is_VM_thread(), "must be VMThread");
 933      assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
 934      assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
 935      _is_active = true;
 936    }
 937    ~ClassFieldMapCacheMark() {
 938      JvmtiCachedClassFieldMap::clear_cache();
 939      _is_active = false;
 940    }
 941    static bool is_active() { return _is_active; }
 942 };
 943 
 944 bool ClassFieldMapCacheMark::_is_active;
 945 
 946 
 947 // record that the given InstanceKlass is caching a field map
 948 void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
 949   if (_class_list == NULL) {
 950     _class_list = new (ResourceObj::C_HEAP, mtInternal)
 951       GrowableArray<InstanceKlass*>(initial_class_count, true);
 952   }
 953   _class_list->push(ik);
 954 }
 955 
 956 // returns the instance field map for the given object
 957 // (returns field map cached by the InstanceKlass if possible)
 958 ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
 959   assert(Thread::current()->is_VM_thread(), "must be VMThread");
 960   assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");
 961 
 962   Klass* k = obj->klass();
 963   InstanceKlass* ik = InstanceKlass::cast(k);
 964 
 965   // return cached map if possible
 966   JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
 967   if (cached_map != NULL) {
 968     assert(cached_map->field_map() != NULL, "missing field list");
 969     return cached_map->field_map();
 970   } else {
 971     ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
 972     cached_map = new JvmtiCachedClassFieldMap(field_map);
 973     ik->set_jvmti_cached_class_field_map(cached_map);
 974     add_to_class_list(ik);
 975     return field_map;
 976   }
 977 }
 978 
 979 // remove the field maps cached by all instanceKlasses
 980 void JvmtiCachedClassFieldMap::clear_cache() {
 981   assert(Thread::current()->is_VM_thread(), "must be VMThread");
 982   if (_class_list != NULL) {
 983     for (int i = 0; i < _class_list->length(); i++) {
 984       InstanceKlass* ik = _class_list->at(i);
 985       JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
 986       assert(cached_map != NULL, "should not be NULL");
 987       ik->set_jvmti_cached_class_field_map(NULL);
 988       delete cached_map;  // deletes the encapsulated field map
 989     }
 990     delete _class_list;
 991     _class_list = NULL;
 992   }
 993 }
 994 
 995 // returns the number of ClassFieldMaps cached by instanceKlasses
 996 int JvmtiCachedClassFieldMap::cached_field_map_count() {
 997   return (_class_list == NULL) ? 0 : _class_list->length();
 998 }
 999 
1000 // helper function to indicate if an object is filtered by its tag or class tag
1001 static inline bool is_filtered_by_heap_filter(jlong obj_tag,
1002                                               jlong klass_tag,
1003                                               int heap_filter) {
1004   // apply the heap filter
1005   if (obj_tag != 0) {
1006     // filter out tagged objects
1007     if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
1008   } else {
1009     // filter out untagged objects
1010     if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
1011   }
1012   if (klass_tag != 0) {
1013     // filter out objects with tagged classes
1014     if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
1015   } else {
1016     // filter out objects with untagged classes.
1017     if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
1018   }
1019   return false;
1020 }
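     // For example, a heap_filter of (JVMTI_HEAP_FILTER_UNTAGGED | JVMTI_HEAP_FILTER_CLASS_UNTAGGED)
     // filters out everything except objects that are themselves tagged and whose class is tagged.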
1021 
1022 // helper function to indicate if an object is filtered by a klass filter
1023 static inline bool is_filtered_by_klass_filter(oop obj, Klass* klass_filter) {
1024   if (klass_filter != NULL) {
1025     if (obj->klass() != klass_filter) {
1026       return true;
1027     }
1028   }
1029   return false;
1030 }
1031 
1032 // helper function to tell if a field is a primitive field or not
1033 static inline bool is_primitive_field_type(char type) {
1034   return (type != 'L' && type != '[');
1035 }
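     // For example, signature characters 'I' (int) and 'D' (double) denote primitive
     // fields, while 'L' (an object reference) and '[' (an array) do not.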
1036 
1037 // helper function to copy the value from location addr to jvalue.
1038 static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
1039   switch (value_type) {
1040     case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
1041     case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
1042     case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
1043     case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
1044     case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
1045     case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
1046     case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
1047     case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
1048     default: ShouldNotReachHere();
1049   }
1050 }
1051 
1052 // helper function to invoke string primitive value callback
1053 // returns visit control flags
1054 static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
1055                                          CallbackWrapper* wrapper,
1056                                          oop str,
1057                                          void* user_data)
1058 {
1059   assert(str->klass() == SystemDictionary::String_klass(), "not a string");
1060 
1061   typeArrayOop s_value = java_lang_String::value(str);
1062 
1063   // JDK-6584008: the value field may be null if a String instance is
1064   // partially constructed.
1065   if (s_value == NULL) {
1066     return 0;
1067   }
1068   // get the string value and length
1069   // (string value may be offset from the base)
1070   int s_len = java_lang_String::length(str);
1071   bool is_latin1 = java_lang_String::is_latin1(str);
1072   jchar* value;
1073   if (s_len > 0) {
1074     if (!is_latin1) {
1075       value = s_value->char_at_addr(0);
1076     } else {
1077       // Inflate latin1 encoded string to UTF16
1078       jchar* buf = NEW_C_HEAP_ARRAY(jchar, s_len, mtInternal);
1079       for (int i = 0; i < s_len; i++) {
1080         buf[i] = ((jchar) s_value->byte_at(i)) & 0xff;
1081       }
1082       value = &buf[0];
1083     }
1084   } else {
1085     // Don't use char_at_addr(0) if length is 0
1086     value = (jchar*) s_value->base(T_CHAR);
1087   }
1088 
1089   // invoke the callback
1090   jint res = (*cb)(wrapper->klass_tag(),
1091                    wrapper->obj_size(),
1092                    wrapper->obj_tag_p(),
1093                    value,
1094                    (jint)s_len,
1095                    user_data);
1096 
1097   if (is_latin1 && s_len > 0) {
1098     FREE_C_HEAP_ARRAY(jchar, value);
1099   }
1100   return res;
1101 }
1102 
1103 // helper function to invoke array primitive value callback
1104 // returns visit control flags
1105 static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
1106                                                   CallbackWrapper* wrapper,
1107                                                   oop obj,
1108                                                   void* user_data)
1109 {
1110   assert(obj->is_typeArray(), "not a primitive array");
1111 
1112   // get base address of first element
1113   typeArrayOop array = typeArrayOop(obj);
1114   BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
1115   void* elements = array->base(type);
1116 
1117   // jvmtiPrimitiveType is defined so this mapping is always correct
1118   jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);
1119 
1120   return (*cb)(wrapper->klass_tag(),
1121                wrapper->obj_size(),
1122                wrapper->obj_tag_p(),
1123                (jint)array->length(),
1124                elem_type,
1125                elements,
1126                user_data);
1127 }
1128 
1129 // helper function to invoke the primitive field callback for all static fields
1130 // of a given class
1131 static jint invoke_primitive_field_callback_for_static_fields
1132   (CallbackWrapper* wrapper,
1133    oop obj,
1134    jvmtiPrimitiveFieldCallback cb,
1135    void* user_data)
1136 {
1137   // for static fields only the index will be set
1138   static jvmtiHeapReferenceInfo reference_info = { 0 };
1139 
1140   assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
1141   if (java_lang_Class::is_primitive(obj)) {
1142     return 0;
1143   }
1144   Klass* klass = java_lang_Class::as_Klass(obj);
1145 
1146   // ignore classes for object and type arrays
1147   if (!klass->is_instance_klass()) {
1148     return 0;
1149   }
1150 
1151   // ignore classes which aren't linked yet
1152   InstanceKlass* ik = InstanceKlass::cast(klass);
1153   if (!ik->is_linked()) {
1154     return 0;
1155   }
1156 
1157   // get the field map
1158   ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
1159 
1160   // invoke the callback for each static primitive field
1161   for (int i=0; i<field_map->field_count(); i++) {
1162     ClassFieldDescriptor* field = field_map->field_at(i);
1163 
1164     // ignore non-primitive fields
1165     char type = field->field_type();
1166     if (!is_primitive_field_type(type)) {
1167       continue;
1168     }
1169     // one-to-one mapping
1170     jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1171 
1172     // get offset and field value
1173     int offset = field->field_offset();
1174     address addr = (address)klass->java_mirror() + offset;
1175     jvalue value;
1176     copy_to_jvalue(&value, addr, value_type);
1177 
1178     // field index
1179     reference_info.field.index = field->field_index();
1180 
1181     // invoke the callback
1182     jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
1183                      &reference_info,
1184                      wrapper->klass_tag(),
1185                      wrapper->obj_tag_p(),
1186                      value,
1187                      value_type,
1188                      user_data);
1189     if (res & JVMTI_VISIT_ABORT) {
1190       delete field_map;
1191       return res;
1192     }
1193   }
1194 
1195   delete field_map;
1196   return 0;
1197 }
1198 
1199 // helper function to invoke the primitive field callback for all instance fields
1200 // of a given object
1201 static jint invoke_primitive_field_callback_for_instance_fields(
1202   CallbackWrapper* wrapper,
1203   oop obj,
1204   jvmtiPrimitiveFieldCallback cb,
1205   void* user_data)
1206 {
1207   // for instance fields only the index will be set
1208   static jvmtiHeapReferenceInfo reference_info = { 0 };
1209 
1210   // get the map of the instance fields
1211   ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);
1212 
1213   // invoke the callback for each instance primitive field
1214   for (int i=0; i<fields->field_count(); i++) {
1215     ClassFieldDescriptor* field = fields->field_at(i);
1216 
1217     // ignore non-primitive fields
1218     char type = field->field_type();
1219     if (!is_primitive_field_type(type)) {
1220       continue;
1221     }
1222     // one-to-one mapping
1223     jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1224 
1225     // get offset and field value
1226     int offset = field->field_offset();
1227     address addr = (address)obj + offset;
1228     jvalue value;
1229     copy_to_jvalue(&value, addr, value_type);
1230 
1231     // field index
1232     reference_info.field.index = field->field_index();
1233 
1234     // invoke the callback
1235     jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
1236                      &reference_info,
1237                      wrapper->klass_tag(),
1238                      wrapper->obj_tag_p(),
1239                      value,
1240                      value_type,
1241                      user_data);
1242     if (res & JVMTI_VISIT_ABORT) {
1243       return res;
1244     }
1245   }
1246   return 0;
1247 }
1248 
1249 
1250 // VM operation to iterate over all objects in the heap (both reachable
1251 // and unreachable)
1252 class VM_HeapIterateOperation: public VM_Operation {
1253  private:
1254   ObjectClosure* _blk;
1255  public:
1256   VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }
1257 
1258   VMOp_Type type() const { return VMOp_HeapIterateOperation; }
1259   void doit() {
1260     // allows class field maps to be cached during iteration
1261     ClassFieldMapCacheMark cm;
1262 
1263     // make sure that heap is parsable (fills TLABs with filler objects)
1264     Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
1265 
1266     // Verify heap before iteration - if the heap gets corrupted then
1267     // JVMTI's IterateOverHeap will crash.
1268     if (VerifyBeforeIteration) {
1269       Universe::verify();
1270     }
1271 
1272     // do the iteration
1273     // If this operation encounters a bad object when using CMS,
1274     // consider using safe_object_iterate() which avoids perm gen
1275     // objects that may contain bad references.
1276     Universe::heap()->object_iterate(_blk);
1277   }
1278 
1279 };
1280 
1281 
1282 // An ObjectClosure used to support the deprecated IterateOverHeap and
1283 // IterateOverInstancesOfClass functions
1284 class IterateOverHeapObjectClosure: public ObjectClosure {
1285  private:
1286   JvmtiTagMap* _tag_map;
1287   Klass* _klass;
1288   jvmtiHeapObjectFilter _object_filter;
1289   jvmtiHeapObjectCallback _heap_object_callback;
1290   const void* _user_data;
1291 
1292   // accessors
1293   JvmtiTagMap* tag_map() const                    { return _tag_map; }
1294   jvmtiHeapObjectFilter object_filter() const     { return _object_filter; }
1295   jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
1296   Klass* klass() const                            { return _klass; }
1297   const void* user_data() const                   { return _user_data; }
1298 
1299   // indicates if iteration has been aborted
1300   bool _iteration_aborted;
1301   bool is_iteration_aborted() const               { return _iteration_aborted; }
1302   void set_iteration_aborted(bool aborted)        { _iteration_aborted = aborted; }
1303 
1304  public:
1305   IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
1306                                Klass* klass,
1307                                jvmtiHeapObjectFilter object_filter,
1308                                jvmtiHeapObjectCallback heap_object_callback,
1309                                const void* user_data) :
1310     _tag_map(tag_map),
1311     _klass(klass),
1312     _object_filter(object_filter),
1313     _heap_object_callback(heap_object_callback),
1314     _user_data(user_data),
1315     _iteration_aborted(false)
1316   {
1317   }
1318 
1319   void do_object(oop o);
1320 };
1321 
1322 // invoked for each object in the heap
1323 void IterateOverHeapObjectClosure::do_object(oop o) {
1324   // check if iteration has been halted
1325   if (is_iteration_aborted()) return;
1326 
1327   // ignore any objects that aren't visible to profiler
1328   if (!ServiceUtil::visible_oop(o)) return;
1329 
1330   // instanceof check when filtering by klass
1331   if (klass() != NULL && !o->is_a(klass())) {
1332     return;
1333   }
1334   // prepare for the callback
1335   CallbackWrapper wrapper(tag_map(), o);
1336 
1337   // if the object is tagged and we're only interested in untagged objects
1338   // then don't invoke the callback. Similarly, if the object is untagged
1339   // and we're only interested in tagged objects we skip the callback.
1340   if (wrapper.obj_tag() != 0) {
1341     if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
1342   } else {
1343     if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
1344   }
1345 
1346   // invoke the agent's callback
1347   jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
1348                                                        wrapper.obj_size(),
1349                                                        wrapper.obj_tag_p(),
1350                                                        (void*)user_data());
1351   if (control == JVMTI_ITERATION_ABORT) {
1352     set_iteration_aborted(true);
1353   }
1354 }
1355 
1356 // An ObjectClosure used to support the IterateThroughHeap function
1357 class IterateThroughHeapObjectClosure: public ObjectClosure {
1358  private:
1359   JvmtiTagMap* _tag_map;
1360   Klass* _klass;
1361   int _heap_filter;
1362   const jvmtiHeapCallbacks* _callbacks;
1363   const void* _user_data;
1364 
1365   // accessor functions
1366   JvmtiTagMap* tag_map() const                     { return _tag_map; }
1367   int heap_filter() const                          { return _heap_filter; }
1368   const jvmtiHeapCallbacks* callbacks() const      { return _callbacks; }
1369   Klass* klass() const                             { return _klass; }
1370   const void* user_data() const                    { return _user_data; }
1371 
1372   // indicates if the iteration has been aborted
1373   bool _iteration_aborted;
1374   bool is_iteration_aborted() const                { return _iteration_aborted; }
1375 
1376   // used to check the visit control flags. If the abort flag is set
1377   // then we set the iteration aborted flag so that the iteration completes
1378   // without processing any further objects
1379   bool check_flags_for_abort(jint flags) {
1380     bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
1381     if (is_abort) {
1382       _iteration_aborted = true;
1383     }
1384     return is_abort;
1385   }
1386 
1387  public:
1388   IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
1389                                   Klass* klass,
1390                                   int heap_filter,
1391                                   const jvmtiHeapCallbacks* heap_callbacks,
1392                                   const void* user_data) :
1393     _tag_map(tag_map),
1394     _klass(klass),
1395     _heap_filter(heap_filter),
1396     _callbacks(heap_callbacks),
1397     _user_data(user_data),
1398     _iteration_aborted(false)
1399   {
1400   }
1401 
1402   void do_object(oop o);
1403 };
1404 
1405 // invoked for each object in the heap
1406 void IterateThroughHeapObjectClosure::do_object(oop obj) {
1407   // check if iteration has been halted
1408   if (is_iteration_aborted()) return;
1409 
1410   // ignore any objects that aren't visible to profiler
1411   if (!ServiceUtil::visible_oop(obj)) return;
1412 
1413   // apply class filter
1414   if (is_filtered_by_klass_filter(obj, klass())) return;
1415 
1416   // prepare for callback
1417   CallbackWrapper wrapper(tag_map(), obj);
1418 
1419   // check if filtered by the heap filter
1420   if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
1421     return;
1422   }
1423 
1424   // for arrays we need the length, otherwise -1
1425   bool is_array = obj->is_array();
1426   int len = is_array ? arrayOop(obj)->length() : -1;
1427 
1428   // invoke the object callback (if callback is provided)
1429   if (callbacks()->heap_iteration_callback != NULL) {
1430     jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
1431     jint res = (*cb)(wrapper.klass_tag(),
1432                      wrapper.obj_size(),
1433                      wrapper.obj_tag_p(),
1434                      (jint)len,
1435                      (void*)user_data());
1436     if (check_flags_for_abort(res)) return;
1437   }
1438 
1439   // for objects and classes we report primitive fields if callback provided
1440   if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
1441     jint res;
1442     jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
1443     if (obj->klass() == SystemDictionary::Class_klass()) {
1444       res = invoke_primitive_field_callback_for_static_fields(&wrapper,
1445                                                                     obj,
1446                                                                     cb,
1447                                                                     (void*)user_data());
1448     } else {
1449       res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
1450                                                                       obj,
1451                                                                       cb,
1452                                                                       (void*)user_data());
1453     }
1454     if (check_flags_for_abort(res)) return;
1455   }
1456 
1457   // string callback
1458   if (!is_array &&
1459       callbacks()->string_primitive_value_callback != NULL &&
1460       obj->klass() == SystemDictionary::String_klass()) {
1461     jint res = invoke_string_value_callback(
1462                 callbacks()->string_primitive_value_callback,
1463                 &wrapper,
1464                 obj,
1465                 (void*)user_data() );
1466     if (check_flags_for_abort(res)) return;
1467   }
1468 
1469   // array callback
1470   if (is_array &&
1471       callbacks()->array_primitive_value_callback != NULL &&
1472       obj->is_typeArray()) {
1473     jint res = invoke_array_primitive_value_callback(
1474                callbacks()->array_primitive_value_callback,
1475                &wrapper,
1476                obj,
1477                (void*)user_data() );
1478     if (check_flags_for_abort(res)) return;
1479   }
1480 };
1481 
1482 
1483 // Deprecated function to iterate over all objects in the heap
1484 void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
1485                                     Klass* klass,
1486                                     jvmtiHeapObjectCallback heap_object_callback,
1487                                     const void* user_data)
1488 {
1489   MutexLocker ml(Heap_lock);
1490   IterateOverHeapObjectClosure blk(this,
1491                                    klass,
1492                                    object_filter,
1493                                    heap_object_callback,
1494                                    user_data);
1495   VM_HeapIterateOperation op(&blk);
1496   VMThread::execute(&op);
1497 }
1498 
1499 
1500 // Iterates over all objects in the heap
1501 void JvmtiTagMap::iterate_through_heap(jint heap_filter,
1502                                        Klass* klass,
1503                                        const jvmtiHeapCallbacks* callbacks,
1504                                        const void* user_data)
1505 {
1506   MutexLocker ml(Heap_lock);
1507   IterateThroughHeapObjectClosure blk(this,
1508                                       klass,
1509                                       heap_filter,
1510                                       callbacks,
1511                                       user_data);
1512   VM_HeapIterateOperation op(&blk);
1513   VMThread::execute(&op);
1514 }
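     // Agent-side view (illustrative sketch using the standard JVMTI interface, not
     // code from this file) of a call that ends up in iterate_through_heap() above,
     // counting only objects that already carry a tag:
     //
     //   static jint JNICALL count_cb(jlong class_tag, jlong size, jlong* tag_ptr,
     //                                jint length, void* user_data) {
     //     (*(jlong*)user_data)++;
     //     return JVMTI_VISIT_OBJECTS;
     //   }
     //   ...
     //   jvmtiHeapCallbacks callbacks;
     //   memset(&callbacks, 0, sizeof(callbacks));
     //   callbacks.heap_iteration_callback = &count_cb;
     //   jlong count = 0;
     //   jvmti->IterateThroughHeap(JVMTI_HEAP_FILTER_UNTAGGED, NULL, &callbacks, &count);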
1515 
1516 // support class for get_objects_with_tags
1517 
1518 class TagObjectCollector : public JvmtiTagHashmapEntryClosure {
1519  private:
1520   JvmtiEnv* _env;
1521   jlong* _tags;
1522   jint _tag_count;
1523 
1524   GrowableArray<jobject>* _object_results;  // collected objects (JNI local refs)
1525   GrowableArray<uint64_t>* _tag_results;    // collected tags
1526 
1527  public:
1528   TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) {
1529     _env = env;
1530     _tags = (jlong*)tags;
1531     _tag_count = tag_count;
1532     _object_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<jobject>(1,true);
1533     _tag_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<uint64_t>(1,true);
1534   }
1535 
1536   ~TagObjectCollector() {
1537     delete _object_results;
1538     delete _tag_results;
1539   }
1540 
1541   // For each tagged object check whether the tag value matches one of
1542   // the requested tags. If it does, create a JNI local reference to the
1543   // object and record the reference and tag value.
1544   //
1545   void do_entry(JvmtiTagHashmapEntry* entry) {
1546     for (int i=0; i<_tag_count; i++) {
1547       if (_tags[i] == entry->tag()) {
1548         // The reference in this tag map could be the only (implicitly weak)
1549         // reference to that object. If we hand it out, we need to keep it live wrt
1550         // SATB marking similar to other j.l.ref.Reference referents. This is
1551         // achieved by using a phantom load in the object() accessor.
1552         oop o = entry->object();
1553         assert(o != NULL && Universe::heap()->is_in_reserved(o), "sanity check");
1554         jobject ref = JNIHandles::make_local(JavaThread::current(), o);
1555         _object_results->append(ref);
1556         _tag_results->append((uint64_t)entry->tag());
1557       }
1558     }
1559   }
1560 
1561   // return the results from the collection
1562   //
1563   jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1564     jvmtiError error;
1565     int count = _object_results->length();
1566     assert(count >= 0, "sanity check");
1567 
1568     // if object_result_ptr is not NULL then allocate the result and copy
1569     // in the object references.
1570     if (object_result_ptr != NULL) {
1571       error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
1572       if (error != JVMTI_ERROR_NONE) {
1573         return error;
1574       }
1575       for (int i=0; i<count; i++) {
1576         (*object_result_ptr)[i] = _object_results->at(i);
1577       }
1578     }
1579 
1580     // if tag_result_ptr is not NULL then allocate the result and copy
1581     // in the tag values.
1582     if (tag_result_ptr != NULL) {
1583       error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
1584       if (error != JVMTI_ERROR_NONE) {
1585         if (object_result_ptr != NULL) {
1586           _env->Deallocate((unsigned char*)*object_result_ptr);
1587         }
1588         return error;
1589       }
1590       for (int i=0; i<count; i++) {
1591         (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
1592       }
1593     }
1594 
1595     *count_ptr = count;
1596     return JVMTI_ERROR_NONE;
1597   }
1598 };
1599 
1600 // return the list of objects with the specified tags
1601 jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
1602   jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1603 
1604   TagObjectCollector collector(env(), tags, count);
1605   {
1606     // iterate over all tagged objects
1607     MutexLocker ml(lock());
1608     entry_iterate(&collector);
1609   }
1610   return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
1611 }
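
     // An illustrative, agent-side sketch (not part of the VM): retrieving
     // the objects that carry a given tag with GetObjectsWithTags and then
     // releasing the returned arrays with Deallocate. The tag value 42 and
     // the jvmti variable are hypothetical.
     //
     //   jlong wanted = 42;
     //   jint count = 0;
     //   jobject* objects = NULL;
     //   jlong* tags = NULL;
     //   if (jvmti->GetObjectsWithTags(1, &wanted, &count,
     //                                 &objects, &tags) == JVMTI_ERROR_NONE) {
     //     // objects[0..count-1] are JNI local references
     //     jvmti->Deallocate((unsigned char*)objects);
     //     jvmti->Deallocate((unsigned char*)tags);
     //   }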
1612 
1613 
1614 // ObjectMarker is used to support marking objects when walking the
1615 // heap.
1616 //
1617 // This implementation uses the existing mark bits in an object for
1618 // marking. Objects that are marked must later have their headers restored.
1619 // As most objects are unlocked and don't have their identity hash computed,
1620 // we don't have to save their headers. Instead we save only the headers that
1621 // are "interesting". Later, when the headers are restored, this implementation
1622 // restores all headers to their initial value and then restores the few
1623 // objects that had interesting headers.
1624 //
1625 // Future work: This implementation currently uses growable arrays to save
1626 // the oop and header of interesting objects. As an optimization we could
1627 // use the same technique as the GC and make use of the unused area
1628 // between top() and end().
1629 //
1630 
1631 // An ObjectClosure used to restore the mark bits of an object
1632 class RestoreMarksClosure : public ObjectClosure {
1633  public:
1634   void do_object(oop o) {
1635     if (o != NULL) {
1636       markOop mark = o->mark();
1637       if (mark->is_marked()) {
1638         o->init_mark();
1639       }
1640     }
1641   }
1642 };
1643 
1644 // ObjectMarker provides the mark and visited functions
1645 class ObjectMarker : AllStatic {
1646  private:
1647   // saved headers
1648   static GrowableArray<oop>* _saved_oop_stack;
1649   static GrowableArray<markOop>* _saved_mark_stack;
1650   static bool _needs_reset;                  // do we need to reset mark bits?
1651 
1652  public:
1653   static void init();                       // initialize
1654   static void done();                       // clean-up
1655 
1656   static inline void mark(oop o);           // mark an object
1657   static inline bool visited(oop o);        // check if object has been visited
1658 
1659   static inline bool needs_reset()            { return _needs_reset; }
1660   static inline void set_needs_reset(bool v)  { _needs_reset = v; }
1661 };
1662 
1663 GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
1664 GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
1665 bool ObjectMarker::_needs_reset = true;  // need to reset mark bits by default
1666 
1667 // initialize ObjectMarker - prepares for object marking
1668 void ObjectMarker::init() {
1669   assert(Thread::current()->is_VM_thread(), "must be VMThread");
1670 
1671   // prepare heap for iteration
1672   Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
1673 
1674   // create stacks for interesting headers
1675   _saved_mark_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<markOop>(4000, true);
1676   _saved_oop_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(4000, true);
1677 
1678   if (UseBiasedLocking) {
1679     BiasedLocking::preserve_marks();
1680   }
1681 }
1682 
1683 // Object marking is done so restore object headers
1684 void ObjectMarker::done() {
1685   // iterate over all objects and restore the mark bits to
1686   // their initial value
1687   RestoreMarksClosure blk;
1688   if (needs_reset()) {
1689     Universe::heap()->object_iterate(&blk);
1690   } else {
1691     // We don't need to reset mark bits on this call, but reset the
1692     // flag to the default for the next call.
1693     set_needs_reset(true);
1694   }
1695 
1696   // now restore the interesting headers
1697   for (int i = 0; i < _saved_oop_stack->length(); i++) {
1698     oop o = _saved_oop_stack->at(i);
1699     markOop mark = _saved_mark_stack->at(i);
1700     o->set_mark(mark);
1701   }
1702 
1703   if (UseBiasedLocking) {
1704     BiasedLocking::restore_marks();
1705   }
1706 
1707   // free the stacks
1708   delete _saved_oop_stack;
1709   delete _saved_mark_stack;
1710 }
1711 
1712 // mark an object
1713 inline void ObjectMarker::mark(oop o) {
1714   assert(Universe::heap()->is_in(o), "sanity check");
1715   assert(!o->mark()->is_marked(), "should only mark an object once");
1716 
1717   // object's mark word
1718   markOop mark = o->mark();
1719 
1720   if (mark->must_be_preserved(o)) {
1721     _saved_mark_stack->push(mark);
1722     _saved_oop_stack->push(o);
1723   }
1724 
1725   // mark the object
1726   o->set_mark(markOopDesc::prototype()->set_marked());
1727 }
1728 
1729 // return true if object is marked
1730 inline bool ObjectMarker::visited(oop o) {
1731   return o->mark()->is_marked();
1732 }
1733 
1734 // Stack allocated class to help ensure that ObjectMarker is used
1735 // correctly. Constructor initializes ObjectMarker, destructor calls
1736 // ObjectMarker's done() function to restore object headers.
1737 class ObjectMarkerController : public StackObj {
1738  public:
1739   ObjectMarkerController() {
1740     ObjectMarker::init();
1741   }
1742   ~ObjectMarkerController() {
1743     ObjectMarker::done();
1744   }
1745 };
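
     // A minimal usage sketch (assumption: this mirrors how the heap walk
     // operation below uses the RAII helper; the do_walk name is
     // hypothetical). The destructor guarantees that object headers are
     // restored even on an early return.
     //
     //   void do_walk() {
     //     ObjectMarkerController marker;  // init(): prepare heap, create save stacks
     //     // ... walk objects, using ObjectMarker::visited()/mark() ...
     //   }                                 // done(): restore all object headers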
1746 
1747 
1748 // helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
1749 // (not performance critical as only used for roots)
1750 static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
1751   switch (kind) {
1752     case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
1753     case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
1754     case JVMTI_HEAP_REFERENCE_MONITOR:      return JVMTI_HEAP_ROOT_MONITOR;
1755     case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
1756     case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
1757     case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
1758     case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
1759     default: ShouldNotReachHere();          return JVMTI_HEAP_ROOT_OTHER;
1760   }
1761 }
1762 
1763 // Base class for all heap walk contexts. The base class maintains a flag
1764 // to indicate if the context is valid or not.
1765 class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
1766  private:
1767   bool _valid;
1768  public:
1769   HeapWalkContext(bool valid)                   { _valid = valid; }
1770   void invalidate()                             { _valid = false; }
1771   bool is_valid() const                         { return _valid; }
1772 };
1773 
1774 // A basic heap walk context for the deprecated heap walking functions.
1775 // The context for a basic heap walk consists of the callbacks and the
1776 // fields used by the referrer caching scheme.
1777 class BasicHeapWalkContext: public HeapWalkContext {
1778  private:
1779   jvmtiHeapRootCallback _heap_root_callback;
1780   jvmtiStackReferenceCallback _stack_ref_callback;
1781   jvmtiObjectReferenceCallback _object_ref_callback;
1782 
1783   // used for caching
1784   oop _last_referrer;
1785   jlong _last_referrer_tag;
1786 
1787  public:
1788   BasicHeapWalkContext() : HeapWalkContext(false) { }
1789 
1790   BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
1791                        jvmtiStackReferenceCallback stack_ref_callback,
1792                        jvmtiObjectReferenceCallback object_ref_callback) :
1793     HeapWalkContext(true),
1794     _heap_root_callback(heap_root_callback),
1795     _stack_ref_callback(stack_ref_callback),
1796     _object_ref_callback(object_ref_callback),
1797     _last_referrer(NULL),
1798     _last_referrer_tag(0) {
1799   }
1800 
1801   // accessors
1802   jvmtiHeapRootCallback heap_root_callback() const         { return _heap_root_callback; }
1803   jvmtiStackReferenceCallback stack_ref_callback() const   { return _stack_ref_callback; }
1804   jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback;  }
1805 
1806   oop last_referrer() const               { return _last_referrer; }
1807   void set_last_referrer(oop referrer)    { _last_referrer = referrer; }
1808   jlong last_referrer_tag() const         { return _last_referrer_tag; }
1809   void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
1810 };
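
     // An illustrative, agent-side sketch (not part of the VM) of the
     // deprecated IterateOverReachableObjects call whose callbacks populate a
     // BasicHeapWalkContext. Callback names are hypothetical; callbacks that
     // are not of interest may be passed as NULL.
     //
     //   static jvmtiIterationControl JNICALL
     //   root_cb(jvmtiHeapRootKind root_kind, jlong class_tag, jlong size,
     //           jlong* tag_ptr, void* user_data) {
     //     return JVMTI_ITERATION_CONTINUE;
     //   }
     //
     //   static jvmtiIterationControl JNICALL
     //   ref_cb(jvmtiObjectReferenceKind reference_kind, jlong class_tag,
     //          jlong size, jlong* tag_ptr, jlong referrer_tag,
     //          jint referrer_index, void* user_data) {
     //     return JVMTI_ITERATION_CONTINUE;
     //   }
     //
     //   jvmti->IterateOverReachableObjects(root_cb, NULL /* stack refs */,
     //                                      ref_cb, NULL /* user_data */);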
1811 
1812 // The advanced heap walk context for the FollowReferences functions.
1813 // The context consists of the callbacks and the fields used for filtering.
1814 class AdvancedHeapWalkContext: public HeapWalkContext {
1815  private:
1816   jint _heap_filter;
1817   Klass* _klass_filter;
1818   const jvmtiHeapCallbacks* _heap_callbacks;
1819 
1820  public:
1821   AdvancedHeapWalkContext() : HeapWalkContext(false) { }
1822 
1823   AdvancedHeapWalkContext(jint heap_filter,
1824                            Klass* klass_filter,
1825                            const jvmtiHeapCallbacks* heap_callbacks) :
1826     HeapWalkContext(true),
1827     _heap_filter(heap_filter),
1828     _klass_filter(klass_filter),
1829     _heap_callbacks(heap_callbacks) {
1830   }
1831 
1832   // accessors
1833   jint heap_filter() const         { return _heap_filter; }
1834   Klass* klass_filter() const      { return _klass_filter; }
1835 
1836   const jvmtiHeapReferenceCallback heap_reference_callback() const {
1837     return _heap_callbacks->heap_reference_callback;
1838   };
1839   const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
1840     return _heap_callbacks->primitive_field_callback;
1841   }
1842   const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
1843     return _heap_callbacks->array_primitive_value_callback;
1844   }
1845   const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
1846     return _heap_callbacks->string_primitive_value_callback;
1847   }
1848 };
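
     // An illustrative, agent-side sketch (not part of the VM) of a
     // FollowReferences call whose filter and callbacks populate an
     // AdvancedHeapWalkContext. The callback name follow_cb and the jvmti
     // variable are hypothetical.
     //
     //   static jint JNICALL
     //   follow_cb(jvmtiHeapReferenceKind reference_kind,
     //             const jvmtiHeapReferenceInfo* reference_info,
     //             jlong class_tag, jlong referrer_class_tag, jlong size,
     //             jlong* tag_ptr, jlong* referrer_tag_ptr, jint length,
     //             void* user_data) {
     //     return JVMTI_VISIT_OBJECTS;          // keep following references
     //   }
     //
     //   jvmtiHeapCallbacks callbacks;
     //   memset(&callbacks, 0, sizeof(callbacks));
     //   callbacks.heap_reference_callback = follow_cb;
     //   jvmti->FollowReferences(0 /* no filter */, NULL /* all classes */,
     //                           NULL /* start from roots */, &callbacks, NULL);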
1849 
1850 // The CallbackInvoker is a class with static functions that the heap walk can call
1851 // into to invoke callbacks. It works in one of two modes. The "basic" mode is
1852 // used for the deprecated IterateOverReachableObjects functions. The "advanced"
1853 // mode is for the newer FollowReferences function, which supports many
1854 // additional callbacks.
1855 class CallbackInvoker : AllStatic {
1856  private:
1857   // heap walk styles
1858   enum { basic, advanced };
1859   static int _heap_walk_type;
1860   static bool is_basic_heap_walk()           { return _heap_walk_type == basic; }
1861   static bool is_advanced_heap_walk()        { return _heap_walk_type == advanced; }
1862 
1863   // context for basic style heap walk
1864   static BasicHeapWalkContext _basic_context;
1865   static BasicHeapWalkContext* basic_context() {
1866     assert(_basic_context.is_valid(), "invalid");
1867     return &_basic_context;
1868   }
1869 
1870   // context for advanced style heap walk
1871   static AdvancedHeapWalkContext _advanced_context;
1872   static AdvancedHeapWalkContext* advanced_context() {
1873     assert(_advanced_context.is_valid(), "invalid");
1874     return &_advanced_context;
1875   }
1876 
1877   // context needed for all heap walks
1878   static JvmtiTagMap* _tag_map;
1879   static const void* _user_data;
1880   static GrowableArray<oop>* _visit_stack;
1881 
1882   // accessors
1883   static JvmtiTagMap* tag_map()                        { return _tag_map; }
1884   static const void* user_data()                       { return _user_data; }
1885   static GrowableArray<oop>* visit_stack()             { return _visit_stack; }
1886 
1887   // if the object hasn't been visited then push it onto the visit stack
1888   // so that it will be visited later
1889   static inline bool check_for_visit(oop obj) {
1890     if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
1891     return true;
1892   }
1893 
1894   // invoke basic style callbacks
1895   static inline bool invoke_basic_heap_root_callback
1896     (jvmtiHeapRootKind root_kind, oop obj);
1897   static inline bool invoke_basic_stack_ref_callback
1898     (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
1899      int slot, oop obj);
1900   static inline bool invoke_basic_object_reference_callback
1901     (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);
1902 
1903   // invoke advanced style callbacks
1904   static inline bool invoke_advanced_heap_root_callback
1905     (jvmtiHeapReferenceKind ref_kind, oop obj);
1906   static inline bool invoke_advanced_stack_ref_callback
1907     (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
1908      jmethodID method, jlocation bci, jint slot, oop obj);
1909   static inline bool invoke_advanced_object_reference_callback
1910     (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);
1911 
1912   // used to report the value of primitive fields
1913   static inline bool report_primitive_field
1914     (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);
1915 
1916  public:
1917   // initialize for basic mode
1918   static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1919                                              GrowableArray<oop>* visit_stack,
1920                                              const void* user_data,
1921                                              BasicHeapWalkContext context);
1922 
1923   // initialize for advanced mode
1924   static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1925                                                 GrowableArray<oop>* visit_stack,
1926                                                 const void* user_data,
1927                                                 AdvancedHeapWalkContext context);
1928 
1929    // functions to report roots
1930   static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
1931   static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
1932     jmethodID m, oop o);
1933   static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
1934     jmethodID method, jlocation bci, jint slot, oop o);
1935 
1936   // functions to report references
1937   static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
1938   static inline bool report_class_reference(oop referrer, oop referree);
1939   static inline bool report_class_loader_reference(oop referrer, oop referree);
1940   static inline bool report_signers_reference(oop referrer, oop referree);
1941   static inline bool report_protection_domain_reference(oop referrer, oop referree);
1942   static inline bool report_superclass_reference(oop referrer, oop referree);
1943   static inline bool report_interface_reference(oop referrer, oop referree);
1944   static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
1945   static inline bool report_field_reference(oop referrer, oop referree, jint slot);
1946   static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
1947   static inline bool report_primitive_array_values(oop array);
1948   static inline bool report_string_value(oop str);
1949   static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
1950   static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
1951 };
1952 
1953 // statics
1954 int CallbackInvoker::_heap_walk_type;
1955 BasicHeapWalkContext CallbackInvoker::_basic_context;
1956 AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
1957 JvmtiTagMap* CallbackInvoker::_tag_map;
1958 const void* CallbackInvoker::_user_data;
1959 GrowableArray<oop>* CallbackInvoker::_visit_stack;
1960 
1961 // initialize for basic heap walk (IterateOverReachableObjects et al)
1962 void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1963                                                      GrowableArray<oop>* visit_stack,
1964                                                      const void* user_data,
1965                                                      BasicHeapWalkContext context) {
1966   _tag_map = tag_map;
1967   _visit_stack = visit_stack;
1968   _user_data = user_data;
1969   _basic_context = context;
1970   _advanced_context.invalidate();       // will trigger assertion if used
1971   _heap_walk_type = basic;
1972 }
1973 
1974 // initialize for advanced heap walk (FollowReferences)
1975 void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1976                                                         GrowableArray<oop>* visit_stack,
1977                                                         const void* user_data,
1978                                                         AdvancedHeapWalkContext context) {
1979   _tag_map = tag_map;
1980   _visit_stack = visit_stack;
1981   _user_data = user_data;
1982   _advanced_context = context;
1983   _basic_context.invalidate();      // will trigger assertion if used
1984   _heap_walk_type = advanced;
1985 }
1986 
1987 
1988 // invoke basic style heap root callback
1989 inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
1990   assert(ServiceUtil::visible_oop(obj), "checking");
1991 
1992   // check if heap roots should be reported
1993   jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
1994   if (cb == NULL) {
1995     return check_for_visit(obj);
1996   }
1997 
1998   CallbackWrapper wrapper(tag_map(), obj);
1999   jvmtiIterationControl control = (*cb)(root_kind,
2000                                         wrapper.klass_tag(),
2001                                         wrapper.obj_size(),
2002                                         wrapper.obj_tag_p(),
2003                                         (void*)user_data());
2004   // push root to visit stack when following references
2005   if (control == JVMTI_ITERATION_CONTINUE &&
2006       basic_context()->object_ref_callback() != NULL) {
2007     visit_stack()->push(obj);
2008   }
2009   return control != JVMTI_ITERATION_ABORT;
2010 }
2011 
2012 // invoke basic style stack ref callback
2013 inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
2014                                                              jlong thread_tag,
2015                                                              jint depth,
2016                                                              jmethodID method,
2017                                                              int slot,
2018                                                              oop obj) {
2019   assert(ServiceUtil::visible_oop(obj), "checking");
2020 
2021   // check if stack references should be reported
2022   jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
2023   if (cb == NULL) {
2024     return check_for_visit(obj);
2025   }
2026 
2027   CallbackWrapper wrapper(tag_map(), obj);
2028   jvmtiIterationControl control = (*cb)(root_kind,
2029                                         wrapper.klass_tag(),
2030                                         wrapper.obj_size(),
2031                                         wrapper.obj_tag_p(),
2032                                         thread_tag,
2033                                         depth,
2034                                         method,
2035                                         slot,
2036                                         (void*)user_data());
2037   // push root to visit stack when following references
2038   if (control == JVMTI_ITERATION_CONTINUE &&
2039       basic_context()->object_ref_callback() != NULL) {
2040     visit_stack()->push(obj);
2041   }
2042   return control != JVMTI_ITERATION_ABORT;
2043 }
2044 
2045 // invoke basic style object reference callback
2046 inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
2047                                                                     oop referrer,
2048                                                                     oop referree,
2049                                                                     jint index) {
2050 
2051   assert(ServiceUtil::visible_oop(referrer), "checking");
2052   assert(ServiceUtil::visible_oop(referree), "checking");
2053 
2054   BasicHeapWalkContext* context = basic_context();
2055 
2056   // callback requires the referrer's tag. If it's the same referrer
2057   // as the last call then we use the cached value.
2058   jlong referrer_tag;
2059   if (referrer == context->last_referrer()) {
2060     referrer_tag = context->last_referrer_tag();
2061   } else {
2062     referrer_tag = tag_for(tag_map(), referrer);
2063   }
2064 
2065   // do the callback
2066   CallbackWrapper wrapper(tag_map(), referree);
2067   jvmtiObjectReferenceCallback cb = context->object_ref_callback();
2068   jvmtiIterationControl control = (*cb)(ref_kind,
2069                                         wrapper.klass_tag(),
2070                                         wrapper.obj_size(),
2071                                         wrapper.obj_tag_p(),
2072                                         referrer_tag,
2073                                         index,
2074                                         (void*)user_data());
2075 
2076   // record referrer and referrer tag. For self-references record the
2077   // tag value from the callback as this might differ from referrer_tag.
2078   context->set_last_referrer(referrer);
2079   if (referrer == referree) {
2080     context->set_last_referrer_tag(*wrapper.obj_tag_p());
2081   } else {
2082     context->set_last_referrer_tag(referrer_tag);
2083   }
2084 
2085   if (control == JVMTI_ITERATION_CONTINUE) {
2086     return check_for_visit(referree);
2087   } else {
2088     return control != JVMTI_ITERATION_ABORT;
2089   }
2090 }
2091 
2092 // invoke advanced style heap root callback
2093 inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
2094                                                                 oop obj) {
2095   assert(ServiceUtil::visible_oop(obj), "checking");
2096 
2097   AdvancedHeapWalkContext* context = advanced_context();
2098 
2099   // check that callback is provided
2100   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2101   if (cb == NULL) {
2102     return check_for_visit(obj);
2103   }
2104 
2105   // apply class filter
2106   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2107     return check_for_visit(obj);
2108   }
2109 
2110   // setup the callback wrapper
2111   CallbackWrapper wrapper(tag_map(), obj);
2112 
2113   // apply tag filter
2114   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2115                                  wrapper.klass_tag(),
2116                                  context->heap_filter())) {
2117     return check_for_visit(obj);
2118   }
2119 
2120   // for arrays we need the length, otherwise -1
2121   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2122 
2123   // invoke the callback
2124   jint res  = (*cb)(ref_kind,
2125                     NULL, // referrer info
2126                     wrapper.klass_tag(),
2127                     0,    // referrer_class_tag is 0 for heap root
2128                     wrapper.obj_size(),
2129                     wrapper.obj_tag_p(),
2130                     NULL, // referrer_tag_p
2131                     len,
2132                     (void*)user_data());
2133   if (res & JVMTI_VISIT_ABORT) {
2134     return false;
2135   }
2136   if (res & JVMTI_VISIT_OBJECTS) {
2137     check_for_visit(obj);
2138   }
2139   return true;
2140 }
2141 
2142 // report a reference from a thread stack to an object
2143 inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
2144                                                                 jlong thread_tag,
2145                                                                 jlong tid,
2146                                                                 int depth,
2147                                                                 jmethodID method,
2148                                                                 jlocation bci,
2149                                                                 jint slot,
2150                                                                 oop obj) {
2151   assert(ServiceUtil::visible_oop(obj), "checking");
2152 
2153   AdvancedHeapWalkContext* context = advanced_context();
2154 
2155   // check that a callback is provided
2156   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2157   if (cb == NULL) {
2158     return check_for_visit(obj);
2159   }
2160 
2161   // apply class filter
2162   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2163     return check_for_visit(obj);
2164   }
2165 
2166   // setup the callback wrapper
2167   CallbackWrapper wrapper(tag_map(), obj);
2168 
2169   // apply tag filter
2170   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2171                                  wrapper.klass_tag(),
2172                                  context->heap_filter())) {
2173     return check_for_visit(obj);
2174   }
2175 
2176   // setup the referrer info
2177   jvmtiHeapReferenceInfo reference_info;
2178   reference_info.stack_local.thread_tag = thread_tag;
2179   reference_info.stack_local.thread_id = tid;
2180   reference_info.stack_local.depth = depth;
2181   reference_info.stack_local.method = method;
2182   reference_info.stack_local.location = bci;
2183   reference_info.stack_local.slot = slot;
2184 
2185   // for arrays we need the length, otherwise -1
2186   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2187 
2188   // call into the agent
2189   int res = (*cb)(ref_kind,
2190                   &reference_info,
2191                   wrapper.klass_tag(),
2192                   0,    // referrer_class_tag is 0 for heap root (stack)
2193                   wrapper.obj_size(),
2194                   wrapper.obj_tag_p(),
2195                   NULL, // referrer_tag_p is NULL for a root
2196                   len,
2197                   (void*)user_data());
2198 
2199   if (res & JVMTI_VISIT_ABORT) {
2200     return false;
2201   }
2202   if (res & JVMTI_VISIT_OBJECTS) {
2203     check_for_visit(obj);
2204   }
2205   return true;
2206 }
2207 
2208 // This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
2209 // only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
2210 #define REF_INFO_MASK  ((1 << JVMTI_HEAP_REFERENCE_FIELD)         \
2211                       | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)  \
2212                       | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
2213                       | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
2214                       | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)   \
2215                       | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
2216 
2217 // invoke the object reference callback to report a reference
2218 inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
2219                                                                        oop referrer,
2220                                                                        oop obj,
2221                                                                        jint index)
2222 {
2223   // the field index is the only valid field in reference_info
2224   static jvmtiHeapReferenceInfo reference_info = { 0 };
2225 
2226   assert(ServiceUtil::visible_oop(referrer), "checking");
2227   assert(ServiceUtil::visible_oop(obj), "checking");
2228 
2229   AdvancedHeapWalkContext* context = advanced_context();
2230 
2231   // check that a callback is provided
2232   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2233   if (cb == NULL) {
2234     return check_for_visit(obj);
2235   }
2236 
2237   // apply class filter
2238   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2239     return check_for_visit(obj);
2240   }
2241 
2242   // setup the callback wrapper
2243   TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);
2244 
2245   // apply tag filter
2246   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2247                                  wrapper.klass_tag(),
2248                                  context->heap_filter())) {
2249     return check_for_visit(obj);
2250   }
2251 
2252   // the field index is the only valid field in reference_info
2253   reference_info.field.index = index;
2254 
2255   // for arrays we need the length, otherwise -1
2256   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2257 
2258   // invoke the callback
2259   int res = (*cb)(ref_kind,
2260                   (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
2261                   wrapper.klass_tag(),
2262                   wrapper.referrer_klass_tag(),
2263                   wrapper.obj_size(),
2264                   wrapper.obj_tag_p(),
2265                   wrapper.referrer_tag_p(),
2266                   len,
2267                   (void*)user_data());
2268 
2269   if (res & JVMTI_VISIT_ABORT) {
2270     return false;
2271   }
2272   if (res & JVMTI_VISIT_OBJECTS) {
2273     check_for_visit(obj);
2274   }
2275   return true;
2276 }
2277 
2278 // report a "simple root"
2279 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
2280   assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
2281          kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
2282   assert(ServiceUtil::visible_oop(obj), "checking");
2283 
2284   if (is_basic_heap_walk()) {
2285     // map to old style root kind
2286     jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
2287     return invoke_basic_heap_root_callback(root_kind, obj);
2288   } else {
2289     assert(is_advanced_heap_walk(), "wrong heap walk type");
2290     return invoke_advanced_heap_root_callback(kind, obj);
2291   }
2292 }
2293 
2294 
2295 // report the values of a primitive array
2296 inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
2297   assert(obj->is_typeArray(), "not a primitive array");
2298 
2299   AdvancedHeapWalkContext* context = advanced_context();
2300   assert(context->array_primitive_value_callback() != NULL, "no callback");
2301 
2302   // apply class filter
2303   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2304     return true;
2305   }
2306 
2307   CallbackWrapper wrapper(tag_map(), obj);
2308 
2309   // apply tag filter
2310   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2311                                  wrapper.klass_tag(),
2312                                  context->heap_filter())) {
2313     return true;
2314   }
2315 
2316   // invoke the callback
2317   int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
2318                                                   &wrapper,
2319                                                   obj,
2320                                                   (void*)user_data());
2321   return (!(res & JVMTI_VISIT_ABORT));
2322 }
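
     // An illustrative, agent-side sketch (not part of the VM) of the
     // jvmtiArrayPrimitiveValueCallback shape that receives the values
     // reported above. The callback name array_cb is hypothetical.
     //
     //   static jint JNICALL
     //   array_cb(jlong class_tag, jlong size, jlong* tag_ptr,
     //            jint element_count, jvmtiPrimitiveType element_type,
     //            const void* elements, void* user_data) {
     //     // elements points to element_count values of element_type
     //     return 0;                            // no special visit control
     //   }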
2323 
2324 // invoke the string value callback
2325 inline bool CallbackInvoker::report_string_value(oop str) {
2326   assert(str->klass() == SystemDictionary::String_klass(), "not a string");
2327 
2328   AdvancedHeapWalkContext* context = advanced_context();
2329   assert(context->string_primitive_value_callback() != NULL, "no callback");
2330 
2331   // apply class filter
2332   if (is_filtered_by_klass_filter(str, context->klass_filter())) {
2333     return true;
2334   }
2335 
2336   CallbackWrapper wrapper(tag_map(), str);
2337 
2338   // apply tag filter
2339   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2340                                  wrapper.klass_tag(),
2341                                  context->heap_filter())) {
2342     return true;
2343   }
2344 
2345   // invoke the callback
2346   int res = invoke_string_value_callback(context->string_primitive_value_callback(),
2347                                          &wrapper,
2348                                          str,
2349                                          (void*)user_data());
2350   return (!(res & JVMTI_VISIT_ABORT));
2351 }
2352 
2353 // invoke the primitive field callback
2354 inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
2355                                                     oop obj,
2356                                                     jint index,
2357                                                     address addr,
2358                                                     char type)
2359 {
2360   // for primitive fields only the index will be set
2361   static jvmtiHeapReferenceInfo reference_info = { 0 };
2362 
2363   AdvancedHeapWalkContext* context = advanced_context();
2364   assert(context->primitive_field_callback() != NULL, "no callback");
2365 
2366   // apply class filter
2367   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2368     return true;
2369   }
2370 
2371   CallbackWrapper wrapper(tag_map(), obj);
2372 
2373   // apply tag filter
2374   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2375                                  wrapper.klass_tag(),
2376                                  context->heap_filter())) {
2377     return true;
2378   }
2379 
2380   // the field index in the referrer
2381   reference_info.field.index = index;
2382 
2383   // map the type
2384   jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
2385 
2386   // setup the jvalue
2387   jvalue value;
2388   copy_to_jvalue(&value, addr, value_type);
2389 
2390   jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
2391   int res = (*cb)(ref_kind,
2392                   &reference_info,
2393                   wrapper.klass_tag(),
2394                   wrapper.obj_tag_p(),
2395                   value,
2396                   value_type,
2397                   (void*)user_data());
2398   return (!(res & JVMTI_VISIT_ABORT));
2399 }
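
     // An illustrative, agent-side sketch (not part of the VM) of the
     // jvmtiPrimitiveFieldCallback shape invoked above. The callback name
     // field_cb is hypothetical; reference_info->field.index identifies the
     // reported field.
     //
     //   static jint JNICALL
     //   field_cb(jvmtiHeapReferenceKind kind,
     //            const jvmtiHeapReferenceInfo* info,
     //            jlong object_class_tag, jlong* object_tag_ptr,
     //            jvalue value, jvmtiPrimitiveType value_type, void* user_data) {
     //     if (value_type == JVMTI_PRIMITIVE_TYPE_INT) {
     //       // value.i holds the int value of the field
     //     }
     //     return 0;                            // no special visit control
     //   }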
2400 
2401 
2402 // instance field
2403 inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
2404                                                              jint index,
2405                                                              address value,
2406                                                              char type) {
2407   return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
2408                                 obj,
2409                                 index,
2410                                 value,
2411                                 type);
2412 }
2413 
2414 // static field
2415 inline bool CallbackInvoker::report_primitive_static_field(oop obj,
2416                                                            jint index,
2417                                                            address value,
2418                                                            char type) {
2419   return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
2420                                 obj,
2421                                 index,
2422                                 value,
2423                                 type);
2424 }
2425 
2426 // report a JNI local (root object) to the profiler
2427 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
2428   if (is_basic_heap_walk()) {
2429     return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
2430                                            thread_tag,
2431                                            depth,
2432                                            m,
2433                                            -1,
2434                                            obj);
2435   } else {
2436     return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
2437                                               thread_tag, tid,
2438                                               depth,
2439                                               m,
2440                                               (jlocation)-1,
2441                                               -1,
2442                                               obj);
2443   }
2444 }
2445 
2446 
2447 // report a local (stack reference, root object)
2448 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
2449                                                    jlong tid,
2450                                                    jint depth,
2451                                                    jmethodID method,
2452                                                    jlocation bci,
2453                                                    jint slot,
2454                                                    oop obj) {
2455   if (is_basic_heap_walk()) {
2456     return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
2457                                            thread_tag,
2458                                            depth,
2459                                            method,
2460                                            slot,
2461                                            obj);
2462   } else {
2463     return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
2464                                               thread_tag,
2465                                               tid,
2466                                               depth,
2467                                               method,
2468                                               bci,
2469                                               slot,
2470                                               obj);
2471   }
2472 }
2473 
2474 // report an object referencing a class.
2475 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
2476   if (is_basic_heap_walk()) {
2477     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2478   } else {
2479     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
2480   }
2481 }
2482 
2483 // report a class referencing its class loader.
2484 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
2485   if (is_basic_heap_walk()) {
2486     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2487   } else {
2488     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2489   }
2490 }
2491 
2492 // report a class referencing its signers.
2493 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
2494   if (is_basic_heap_walk()) {
2495     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
2496   } else {
2497     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
2498   }
2499 }
2500 
2501 // report a class referencing its protection domain.
2502 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
2503   if (is_basic_heap_walk()) {
2504     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2505   } else {
2506     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2507   }
2508 }
2509 
2510 // report a class referencing its superclass.
2511 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
2512   if (is_basic_heap_walk()) {
2513     // Send this to be consistent with past implementation
2514     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2515   } else {
2516     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
2517   }
2518 }
2519 
2520 // report a class referencing one of its interfaces.
2521 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
2522   if (is_basic_heap_walk()) {
2523     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
2524   } else {
2525     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
2526   }
2527 }
2528 
2529 // report a class referencing one of its static fields.
2530 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
2531   if (is_basic_heap_walk()) {
2532     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2533   } else {
2534     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2535   }
2536 }
2537 
2538 // report an array referencing an element object
2539 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
2540   if (is_basic_heap_walk()) {
2541     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2542   } else {
2543     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2544   }
2545 }
2546 
2547 // report an object referencing an instance field object
2548 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
2549   if (is_basic_heap_walk()) {
2550     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
2551   } else {
2552     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
2553   }
2554 }
2555 
2556 // report a class referencing one of its constant pool entries
2557 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
2558   if (is_basic_heap_walk()) {
2559     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2560   } else {
2561     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2562   }
2563 }
2564 
2565 // A supporting closure used to process simple roots
2566 class SimpleRootsClosure : public OopClosure {
2567  private:
2568   jvmtiHeapReferenceKind _kind;
2569   bool _continue;
2570 
2571   jvmtiHeapReferenceKind root_kind()    { return _kind; }
2572 
2573  public:
2574   void set_kind(jvmtiHeapReferenceKind kind) {
2575     _kind = kind;
2576     _continue = true;
2577   }
2578 
2579   inline bool stopped() {
2580     return !_continue;
2581   }
2582 
2583   void do_oop(oop* obj_p) {
2584     // iteration has terminated
2585     if (stopped()) {
2586       return;
2587     }
2588 
2589     oop o = *obj_p;
2590     // ignore null
2591     if (o == NULL) {
2592       return;
2593     }
2594 
2595     assert(Universe::heap()->is_in_reserved(o), "should be impossible");
2596 
2597     jvmtiHeapReferenceKind kind = root_kind();
2598     if (kind == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
2599       // SystemDictionary::always_strong_oops_do reports the application
2600       // class loader as a root. We want this root to be reported as
2601       // a root kind of "OTHER" rather than "SYSTEM_CLASS".
2602       if (!o->is_instance() || !InstanceKlass::cast(o->klass())->is_mirror_instance_klass()) {
2603         kind = JVMTI_HEAP_REFERENCE_OTHER;
2604       }
2605     }
2606 
2607     // some objects are ignored - in the case of simple
2608     // roots it's mostly Symbol*s that we are skipping
2609     // here.
2610     if (!ServiceUtil::visible_oop(o)) {
2611       return;
2612     }
2613 
2614     // invoke the callback
2615     _continue = CallbackInvoker::report_simple_root(kind, o);
2616 
2617   }
2618   virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2619 };
2620 
2621 // A supporting closure used to process JNI locals
2622 class JNILocalRootsClosure : public OopClosure {
2623  private:
2624   jlong _thread_tag;
2625   jlong _tid;
2626   jint _depth;
2627   jmethodID _method;
2628   bool _continue;
2629  public:
2630   void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
2631     _thread_tag = thread_tag;
2632     _tid = tid;
2633     _depth = depth;
2634     _method = method;
2635     _continue = true;
2636   }
2637 
2638   inline bool stopped() {
2639     return !_continue;
2640   }
2641 
2642   void do_oop(oop* obj_p) {
2643     // iteration has terminated
2644     if (stopped()) {
2645       return;
2646     }
2647 
2648     oop o = *obj_p;
2649     // ignore null
2650     if (o == NULL) {
2651       return;
2652     }
2653 
2654     if (!ServiceUtil::visible_oop(o)) {
2655       return;
2656     }
2657 
2658     // invoke the callback
2659     _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
2660   }
2661   virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2662 };
2663 
2664 
2665 // A VM operation to iterate over objects that are reachable from
2666 // a set of roots or an initial object.
2667 //
2668 // For VM_HeapWalkOperation the set of roots used is:
2669 //
2670 // - All JNI global references
2671 // - All inflated monitors
2672 // - All classes loaded by the boot class loader (or all classes
2673 //     in the event that class unloading is disabled)
2674 // - All java threads
2675 // - For each java thread then all locals and JNI local references
2676 //      on the thread's execution stack
2677 // - All visible/explainable objects from Universe::oops_do
2678 //
2679 class VM_HeapWalkOperation: public VM_Operation {
2680  private:
2681   enum {
2682     initial_visit_stack_size = 4000
2683   };
2684 
2685   bool _is_advanced_heap_walk;                      // indicates FollowReferences
2686   JvmtiTagMap* _tag_map;
2687   Handle _initial_object;
2688   GrowableArray<oop>* _visit_stack;                 // the visit stack
2689 
2690   bool _collecting_heap_roots;                      // are we collecting roots
2691   bool _following_object_refs;                      // are we following object references
2692 
2693   bool _reporting_primitive_fields;                 // optional reporting
2694   bool _reporting_primitive_array_values;
2695   bool _reporting_string_values;
2696 
2697   GrowableArray<oop>* create_visit_stack() {
2698     return new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(initial_visit_stack_size, true);
2699   }
2700 
2701   // accessors
2702   bool is_advanced_heap_walk() const               { return _is_advanced_heap_walk; }
2703   JvmtiTagMap* tag_map() const                     { return _tag_map; }
2704   Handle initial_object() const                    { return _initial_object; }
2705 
2706   bool is_following_references() const             { return _following_object_refs; }
2707 
2708   bool is_reporting_primitive_fields()  const      { return _reporting_primitive_fields; }
2709   bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
2710   bool is_reporting_string_values() const          { return _reporting_string_values; }
2711 
2712   GrowableArray<oop>* visit_stack() const          { return _visit_stack; }
2713 
2714   // iterate over the various object types
2715   inline bool iterate_over_array(oop o);
2716   inline bool iterate_over_type_array(oop o);
2717   inline bool iterate_over_class(oop o);
2718   inline bool iterate_over_object(oop o);
2719 
2720   // root collection
2721   inline bool collect_simple_roots();
2722   inline bool collect_stack_roots();
2723   inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);
2724 
2725   // visit an object
2726   inline bool visit(oop o);
2727 
2728  public:
2729   VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2730                        Handle initial_object,
2731                        BasicHeapWalkContext callbacks,
2732                        const void* user_data);
2733 
2734   VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2735                        Handle initial_object,
2736                        AdvancedHeapWalkContext callbacks,
2737                        const void* user_data);
2738 
2739   ~VM_HeapWalkOperation();
2740 
2741   VMOp_Type type() const { return VMOp_HeapWalkOperation; }
2742   void doit();
2743 };
2744 
2745 
2746 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2747                                            Handle initial_object,
2748                                            BasicHeapWalkContext callbacks,
2749                                            const void* user_data) {
2750   _is_advanced_heap_walk = false;
2751   _tag_map = tag_map;
2752   _initial_object = initial_object;
2753   _following_object_refs = (callbacks.object_ref_callback() != NULL);
2754   _reporting_primitive_fields = false;
2755   _reporting_primitive_array_values = false;
2756   _reporting_string_values = false;
2757   _visit_stack = create_visit_stack();
2758 
2759 
2760   CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2761 }
2762 
2763 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2764                                            Handle initial_object,
2765                                            AdvancedHeapWalkContext callbacks,
2766                                            const void* user_data) {
2767   _is_advanced_heap_walk = true;
2768   _tag_map = tag_map;
2769   _initial_object = initial_object;
2770   _following_object_refs = true;
2771   _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);
2772   _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);
2773   _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);
2774   _visit_stack = create_visit_stack();
2775 
2776   CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2777 }
2778 
2779 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
2780   if (_following_object_refs) {
2781     assert(_visit_stack != NULL, "checking");
2782     delete _visit_stack;
2783     _visit_stack = NULL;
2784   }
2785 }
2786 
2787 // an array references its class and has a reference to
2788 // each element in the array
2789 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
2790   objArrayOop array = objArrayOop(o);
2791 
2792   // array reference to its class
2793   oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror();
2794   if (!CallbackInvoker::report_class_reference(o, mirror)) {
2795     return false;
2796   }
2797 
2798   // iterate over the array and report each reference to a
2799   // non-null element
2800   for (int index=0; index<array->length(); index++) {
2801     oop elem = array->obj_at(index);
2802     if (elem == NULL) {
2803       continue;
2804     }
2805 
2806     // report the array reference o[index] = elem
2807     if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
2808       return false;
2809     }
2810   }
2811   return true;
2812 }
2813 
2814 // a type array references its class
2815 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
2816   Klass* k = o->klass();
2817   oop mirror = k->java_mirror();
2818   if (!CallbackInvoker::report_class_reference(o, mirror)) {
2819     return false;
2820   }
2821 
2822   // report the array contents if required
2823   if (is_reporting_primitive_array_values()) {
2824     if (!CallbackInvoker::report_primitive_array_values(o)) {
2825       return false;
2826     }
2827   }
2828   return true;
2829 }
2830 
2831 #ifdef ASSERT
2832 // verify that a static oop field is in range
2833 static inline bool verify_static_oop(InstanceKlass* ik,
2834                                      oop mirror, int offset) {
2835   address obj_p = (address)mirror + offset;
2836   address start = (address)InstanceMirrorKlass::start_of_static_fields(mirror);
2837   address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
2838   assert(end >= start, "sanity check");
2839 
2840   if (obj_p >= start && obj_p < end) {
2841     return true;
2842   } else {
2843     return false;
2844   }
2845 }
2846 #endif // #ifdef ASSERT
2847 
2848 // a class references its super class, interfaces, class loader, ...
2849 // and finally its static fields
2850 inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
2851   int i;
2852   Klass* klass = java_lang_Class::as_Klass(java_class);
2853 
2854   if (klass->is_instance_klass()) {
2855     InstanceKlass* ik = InstanceKlass::cast(klass);
2856 
2857     // Ignore the class if it hasn't been linked yet
2858     if (!ik->is_linked()) {
2859       return true;
2860     }
2861 
2862     // get the java mirror
2863     oop mirror = klass->java_mirror();
2864 
2865     // super (only if something more interesting than java.lang.Object)
2866     Klass* java_super = ik->java_super();
2867     if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
2868       oop super = java_super->java_mirror();
2869       if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
2870         return false;
2871       }
2872     }
2873 
2874     // class loader
2875     oop cl = ik->class_loader();
2876     if (cl != NULL) {
2877       if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
2878         return false;
2879       }
2880     }
2881 
2882     // protection domain
2883     oop pd = ik->protection_domain();
2884     if (pd != NULL) {
2885       if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
2886         return false;
2887       }
2888     }
2889 
2890     // signers
2891     oop signers = ik->signers();
2892     if (signers != NULL) {
2893       if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
2894         return false;
2895       }
2896     }
2897 
2898     // references from the constant pool
2899     {
2900       ConstantPool* pool = ik->constants();
2901       for (int i = 1; i < pool->length(); i++) {
2902         constantTag tag = pool->tag_at(i).value();
2903         if (tag.is_string() || tag.is_klass()) {
2904           oop entry;
2905           if (tag.is_string()) {
2906             entry = pool->resolved_string_at(i);
2907             // A null entry means the string has not been resolved yet; skip it.
2908             if (entry == NULL) continue;
2909           } else {
2910             entry = pool->resolved_klass_at(i)->java_mirror();
2911           }
2912           if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
2913             return false;
2914           }
2915         }
2916       }
2917     }
2918 
2919     // interfaces
2920     // (These will already have been reported as references from the constant pool
2921     //  but are specified by IterateOverReachableObjects and must be reported).
2922     Array<Klass*>* interfaces = ik->local_interfaces();
2923     for (i = 0; i < interfaces->length(); i++) {
2924       oop interf = ((Klass*)interfaces->at(i))->java_mirror();
2925       if (interf == NULL) {
2926         continue;
2927       }
2928       if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
2929         return false;
2930       }
2931     }
2932 
2933     // iterate over the static fields
2934 
2935     ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
2936     for (i=0; i<field_map->field_count(); i++) {
2937       ClassFieldDescriptor* field = field_map->field_at(i);
2938       char type = field->field_type();
2939       if (!is_primitive_field_type(type)) {
2940         oop fld_o = mirror->obj_field(field->field_offset());
2941         assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
2942         if (fld_o != NULL) {
2943           int slot = field->field_index();
2944           if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
2945             delete field_map;
2946             return false;
2947           }
2948         }
2949       } else {
2950         if (is_reporting_primitive_fields()) {
2951           address addr = (address)mirror + field->field_offset();
2952           int slot = field->field_index();
2953           if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
2954             delete field_map;
2955             return false;
2956           }
2957         }
2958       }
2959     }
2960     delete field_map;
2961 
2962     return true;
2963   }
2964 
2965   return true;
2966 }
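
     // Illustrative example (not part of the walk itself): for a class such as
     //   class C extends B implements I { static Object S = new Object(); }
     // the code above reports, with C's mirror as the referrer: a superclass
     // reference to B's mirror (B is not java.lang.Object), a class loader
     // reference, protection domain and signers references when those are
     // non-null, constant pool references for resolved String and Class
     // entries, an interface reference to I's mirror, and a static field
     // reference for S when S is non-null.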
2967 
2968 // an object references a class and its instance fields
2969 // (static fields are ignored here as we report these as
2970 // references from the class).
2971 inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
2972   // reference to the class
2973   if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
2974     return false;
2975   }
2976 
2977   // iterate over instance fields
2978   ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
2979   for (int i=0; i<field_map->field_count(); i++) {
2980     ClassFieldDescriptor* field = field_map->field_at(i);
2981     char type = field->field_type();
2982     if (!is_primitive_field_type(type)) {
2983       oop fld_o = o->obj_field(field->field_offset());
2984       // ignore any objects that aren't visible to profiler
2985       if (fld_o != NULL && ServiceUtil::visible_oop(fld_o)) {
2986         assert(Universe::heap()->is_in_reserved(fld_o), "unsafe code should not "
2987                "have references to Klass* anymore");
2988         int slot = field->field_index();
2989         if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
2990           return false;
2991         }
2992       }
2993     } else {
2994       if (is_reporting_primitive_fields()) {
2995         // primitive instance field
2996         address addr = (address)o + field->field_offset();
2997         int slot = field->field_index();
2998         if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
2999           return false;
3000         }
3001       }
3002     }
3003   }
3004 
3005   // if the object is a java.lang.String
3006   if (is_reporting_string_values() &&
3007       o->klass() == SystemDictionary::String_klass()) {
3008     if (!CallbackInvoker::report_string_value(o)) {
3009       return false;
3010     }
3011   }
3012   return true;
3013 }
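
     // Illustrative example (not part of the walk itself): for an instance p of
     //   class Point { Object ref; int x; }
     // the code above reports a class reference (p -> Point's mirror), a field
     // reference for p.ref when it is non-null and visible to the profiler,
     // and, if primitive field reporting is enabled, a primitive instance
     // field callback for p.x. If p were a java.lang.String and string value
     // reporting were enabled, its value would also be reported.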
3014 
3015 
3016 // Collects all simple (non-stack) roots except for threads;
3017 // threads are handled in collect_stack_roots() as an optimization.
3018 // If a heap root callback is provided then the callback is
3019 // invoked for each simple root.
3020 // If an object reference callback is provided then all simple
3021 // roots are pushed onto the marking stack so that they can be
3022 // processed later.
3023 //
3024 inline bool VM_HeapWalkOperation::collect_simple_roots() {
3025   SimpleRootsClosure blk;
3026 
3027   // JNI globals
3028   blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
3029   JNIHandles::oops_do(&blk);
3030   if (blk.stopped()) {
3031     return false;
3032   }
3033 
3034   // Preloaded classes and loader from the system dictionary
3035   blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
3036   SystemDictionary::always_strong_oops_do(&blk);
3037   ClassLoaderDataGraph::always_strong_oops_do(&blk, false);
3038   if (blk.stopped()) {
3039     return false;
3040   }
3041 
3042   // Inflated monitors
3043   blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
3044   ObjectSynchronizer::oops_do(&blk);
3045   if (blk.stopped()) {
3046     return false;
3047   }
3048 
3049   // threads are now handled in collect_stack_roots()
3050 
3051   // Other kinds of roots maintained by HotSpot
3052   // Many of these won't be visible but others (such as instances of important
3053   // exceptions) will be visible.
3054   blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3055   Universe::oops_do(&blk);
3056 
3057   // If there are any non-perm roots in the code cache, visit them.
3058   blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3059   CodeBlobToOopClosure look_in_blobs(&blk, !CodeBlobToOopClosure::FixRelocations);
3060   CodeCache::scavenge_root_nmethods_do(&look_in_blobs);
3061 
3062   return true;
3063 }
3064 
3065 // Walk the stack of a given thread and find all references (locals
3066 // and JNI locals) and report these as stack references
3067 inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
3068                                                       JNILocalRootsClosure* blk)
3069 {
3070   oop threadObj = java_thread->threadObj();
3071   assert(threadObj != NULL, "sanity check");
3072 
3073   // only need to get the thread's tag once per thread
3074   jlong thread_tag = tag_for(_tag_map, threadObj);
3075 
3076   // also need the thread id
3077   jlong tid = java_lang_Thread::thread_id(threadObj);
3078 
3079 
3080   if (java_thread->has_last_Java_frame()) {
3081 
3082     // vframes are resource allocated
3083     Thread* current_thread = Thread::current();
3084     ResourceMark rm(current_thread);
3085     HandleMark hm(current_thread);
3086 
3087     RegisterMap reg_map(java_thread);
3088     frame f = java_thread->last_frame();
3089     vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);
3090 
3091     bool is_top_frame = true;
3092     int depth = 0;
3093     frame* last_entry_frame = NULL;
3094 
3095     while (vf != NULL) {
3096       if (vf->is_java_frame()) {
3097 
3098         // java frame (interpreted, compiled, ...)
3099         javaVFrame *jvf = javaVFrame::cast(vf);
3100 
3101         // the jmethodID
3102         jmethodID method = jvf->method()->jmethod_id();
3103 
3104         if (!(jvf->method()->is_native())) {
3105           jlocation bci = (jlocation)jvf->bci();
3106           StackValueCollection* locals = jvf->locals();
3107           for (int slot=0; slot<locals->size(); slot++) {
3108             if (locals->at(slot)->type() == T_OBJECT) {
3109               oop o = locals->obj_at(slot)();
3110               if (o == NULL) {
3111                 continue;
3112               }
3113 
3114               // stack reference
3115               if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
3116                                                    bci, slot, o)) {
3117                 return false;
3118               }
3119             }
3120           }
3121 
3122           StackValueCollection* exprs = jvf->expressions();
3123           for (int index=0; index < exprs->size(); index++) {
3124             if (exprs->at(index)->type() == T_OBJECT) {
3125               oop o = exprs->obj_at(index)();
3126               if (o == NULL) {
3127                 continue;
3128               }
3129 
3130               // stack reference
3131               if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
3132                                                    bci, locals->size() + index, o)) {
3133                 return false;
3134               }
3135             }
3136           }
3137 
3138           // Follow oops from compiled nmethod
3139           if (jvf->cb() != NULL && jvf->cb()->is_nmethod()) {
3140             blk->set_context(thread_tag, tid, depth, method);
3141             jvf->cb()->as_nmethod()->oops_do(blk);
3142           }
3143         } else {
3144           blk->set_context(thread_tag, tid, depth, method);
3145           if (is_top_frame) {
3146             // JNI locals for the top frame.
3147             java_thread->active_handles()->oops_do(blk);
3148           } else {
3149             if (last_entry_frame != NULL) {
3150               // JNI locals for the entry frame
3151               assert(last_entry_frame->is_entry_frame(), "checking");
3152               last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
3153             }
3154           }
3155         }
3156         last_entry_frame = NULL;
3157         depth++;
3158       } else {
3159         // externalVFrame - if this is an entry frame then we report the JNI
3160         // locals when we find the corresponding javaVFrame
3161         frame* fr = vf->frame_pointer();
3162         assert(fr != NULL, "sanity check");
3163         if (fr->is_entry_frame()) {
3164           last_entry_frame = fr;
3165         }
3166       }
3167 
3168       vf = vf->sender();
3169       is_top_frame = false;
3170     }
3171   } else {
3172     // no last java frame but there may be JNI locals
3173     blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
3174     java_thread->active_handles()->oops_do(blk);
3175   }
3176   return true;
3177 }
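
     // Illustrative example (not part of the walk itself): if frame depth 2 of
     // a thread is executing method m at bci 7 and local slot 3 holds a
     // non-null object o, the code above reports a stack reference root for o
     // with the thread's tag and id, depth 2, m's jmethodID, location 7 and
     // slot 3; operand stack entries are reported with slot numbers starting
     // at locals->size().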
3178 
3179 
3180 // Collects the simple root for each thread (the thread object) and all
3181 // stack roots - for each thread it walks the execution stack to find
3182 // all references and JNI local refs.
3183 inline bool VM_HeapWalkOperation::collect_stack_roots() {
3184   JNILocalRootsClosure blk;
3185   for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
3186     oop threadObj = thread->threadObj();
3187     if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
3188       // Collect the simple root for this thread before we
3189       // collect its stack roots
3190       if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
3191                                                threadObj)) {
3192         return false;
3193       }
3194       if (!collect_stack_roots(thread, &blk)) {
3195         return false;
3196       }
3197     }
3198   }
3199   return true;
3200 }
3201 
3202 // visit an object
3203 // first mark the object as visited
3204 // second get all the outbound references from this object (in other words, all
3205 // the objects referenced by this object).
3206 //
3207 bool VM_HeapWalkOperation::visit(oop o) {
3208   // mark object as visited
3209   assert(!ObjectMarker::visited(o), "can't visit same object more than once");
3210   ObjectMarker::mark(o);
3211 
3212   // instance
3213   if (o->is_instance()) {
3214     if (o->klass() == SystemDictionary::Class_klass()) {
3215       if (!java_lang_Class::is_primitive(o)) {
3216         // a java.lang.Class
3217         return iterate_over_class(o);
3218       }
3219     } else {
3220       return iterate_over_object(o);
3221     }
3222   }
3223 
3224   // object array
3225   if (o->is_objArray()) {
3226     return iterate_over_array(o);
3227   }
3228 
3229   // type array
3230   if (o->is_typeArray()) {
3231     return iterate_over_type_array(o);
3232   }
3233 
3234   return true;
3235 }
3236 
3237 void VM_HeapWalkOperation::doit() {
3238   ResourceMark rm;
3239   ObjectMarkerController marker;
3240   ClassFieldMapCacheMark cm;
3241 
3242   assert(visit_stack()->is_empty(), "visit stack must be empty");
3243 
3244   // the heap walk starts with an initial object or the heap roots
3245   if (initial_object().is_null()) {
3246     // If either collect_stack_roots() or collect_simple_roots()
3247     // returns false at this point, then there are no mark bits
3248     // to reset.
3249     ObjectMarker::set_needs_reset(false);
3250 
3251     // Calling collect_stack_roots() before collect_simple_roots()
3252     // can result in a big performance boost for an agent that is
3253     // focused on analyzing references in the thread stacks.
3254     if (!collect_stack_roots()) return;
3255 
3256     if (!collect_simple_roots()) return;
3257 
3258     // no early return so enable heap traversal to reset the mark bits
3259     ObjectMarker::set_needs_reset(true);
3260   } else {
3261     visit_stack()->push(initial_object()());
3262   }
3263 
3264   // object references required
3265   if (is_following_references()) {
3266 
3267     // visit each object until all reachable objects have been
3268     // visited or the callback asked to terminate the iteration.
3269     while (!visit_stack()->is_empty()) {
3270       oop o = visit_stack()->pop();
3271       if (!ObjectMarker::visited(o)) {
3272         if (!visit(o)) {
3273           break;
3274         }
3275       }
3276     }
3277   }
3278 }
3279 
3280 // iterate over all objects that are reachable from a set of roots
3281 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
3282                                                  jvmtiStackReferenceCallback stack_ref_callback,
3283                                                  jvmtiObjectReferenceCallback object_ref_callback,
3284                                                  const void* user_data) {
3285   MutexLocker ml(Heap_lock);
3286   BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
3287   VM_HeapWalkOperation op(this, Handle(), context, user_data);
3288   VMThread::execute(&op);
3289 }
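
     // Agent-side sketch (illustrative, not part of this file): the operation
     // above backs the JVMTI IterateOverReachableObjects function, which an
     // agent might call as
     //
     //   #include <jvmti.h>
     //
     //   static jvmtiIterationControl JNICALL obj_ref(jvmtiObjectReferenceKind kind,
     //                                                jlong class_tag, jlong size,
     //                                                jlong* tag_ptr, jlong referrer_tag,
     //                                                jint referrer_index, void* user_data) {
     //     return JVMTI_ITERATION_CONTINUE;
     //   }
     //
     //   jvmti->IterateOverReachableObjects(NULL, NULL, &obj_ref, NULL);
     //
     // (obj_ref is a hypothetical name and jvmti is the agent's jvmtiEnv*;
     //  passing NULL heap root and stack reference callbacks means those roots
     //  are not reported, but reachable objects are still followed via the
     //  object reference callback.)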
3290 
3291 // iterate over all objects that are reachable from a given object
3292 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
3293                                                              jvmtiObjectReferenceCallback object_ref_callback,
3294                                                              const void* user_data) {
3295   oop obj = JNIHandles::resolve(object);
3296   Handle initial_object(Thread::current(), obj);
3297 
3298   MutexLocker ml(Heap_lock);
3299   BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
3300   VM_HeapWalkOperation op(this, initial_object, context, user_data);
3301   VMThread::execute(&op);
3302 }
3303 
3304 // follow references from an initial object or the GC roots
3305 void JvmtiTagMap::follow_references(jint heap_filter,
3306                                     Klass* klass,
3307                                     jobject object,
3308                                     const jvmtiHeapCallbacks* callbacks,
3309                                     const void* user_data)
3310 {
3311   oop obj = JNIHandles::resolve(object);
3312   Handle initial_object(Thread::current(), obj);
3313 
3314   MutexLocker ml(Heap_lock);
3315   AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
3316   VM_HeapWalkOperation op(this, initial_object, context, user_data);
3317   VMThread::execute(&op);
3318 }
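
     // Agent-side sketch (illustrative, not part of this file): the operation
     // above backs the JVMTI FollowReferences function, which an agent might
     // call as
     //
     //   #include <jvmti.h>
     //
     //   static jint JNICALL heap_ref(jvmtiHeapReferenceKind kind,
     //                                const jvmtiHeapReferenceInfo* info,
     //                                jlong class_tag, jlong referrer_class_tag,
     //                                jlong size, jlong* tag_ptr,
     //                                jlong* referrer_tag_ptr, jint length,
     //                                void* user_data) {
     //     *tag_ptr = 1;                 // tag the reported object
     //     return JVMTI_VISIT_OBJECTS;   // keep following references
     //   }
     //
     //   jvmtiHeapCallbacks callbacks = {0};
     //   callbacks.heap_reference_callback = &heap_ref;
     //   jvmti->FollowReferences(0, NULL, NULL, &callbacks, NULL);
     //
     // (heap_ref is a hypothetical name and jvmti is the agent's jvmtiEnv*;
     //  heap_filter and klass restrict which objects are reported, while a
     //  non-NULL initial object makes the walk start there instead of at the
     //  heap roots.)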
3319 
3320 
3321 void JvmtiTagMap::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
3322   // No locks during VM bring-up (0 threads) and no safepoints after main
3323   // thread creation and before VMThread creation (1 thread); initial GC
3324   // verification can happen in that window which gets to here.
3325   assert(Threads::number_of_threads() <= 1 ||
3326          SafepointSynchronize::is_at_safepoint(),
3327          "must be executed at a safepoint");
3328   if (JvmtiEnv::environments_might_exist()) {
3329     JvmtiEnvIterator it;
3330     for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
3331       JvmtiTagMap* tag_map = env->tag_map();
3332       if (tag_map != NULL && !tag_map->is_empty()) {
3333         tag_map->do_weak_oops(is_alive, f);
3334       }
3335     }
3336   }
3337 }
3338 
3339 void JvmtiTagMap::do_weak_oops(BoolObjectClosure* is_alive, OopClosure* f) {
3340 
3341   // does this environment have the OBJECT_FREE event enabled?
3342   bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);
3343 
3344   // counters used for trace message
3345   int freed = 0;
3346   int moved = 0;
3347 
3348   JvmtiTagHashmap* hashmap = this->hashmap();
3349 
3350   // re-enable resizing (if it was disabled)
3351   hashmap->set_resizing_enabled(true);
3352 
3353   // if the hashmap is empty then we can skip it
3354   if (hashmap->_entry_count == 0) {
3355     return;
3356   }
3357 
3358   // now iterate through each entry in the table
3359 
3360   JvmtiTagHashmapEntry** table = hashmap->table();
3361   int size = hashmap->size();
3362 
3363   JvmtiTagHashmapEntry* delayed_add = NULL;
3364 
3365   for (int pos = 0; pos < size; ++pos) {
3366     JvmtiTagHashmapEntry* entry = table[pos];
3367     JvmtiTagHashmapEntry* prev = NULL;
3368 
3369     while (entry != NULL) {
3370       JvmtiTagHashmapEntry* next = entry->next();
3371 
3372       // has object been GC'ed
3373       if (!is_alive->do_object_b(entry->object_peek())) {
3374         // grab the tag
3375         jlong tag = entry->tag();
3376         guarantee(tag != 0, "checking");
3377 
3378         // remove GC'ed entry from hashmap and return the
3379         // entry to the free list
3380         hashmap->remove(prev, pos, entry);
3381         destroy_entry(entry);
3382 
3383         // post the event to the profiler
3384         if (post_object_free) {
3385           JvmtiExport::post_object_free(env(), tag);
3386         }
3387 
3388         ++freed;
3389       } else {
3390         f->do_oop(entry->object_addr());
3391         oop new_oop = entry->object_peek();
3392 
3393         // if the object has moved then re-hash it and move its
3394         // entry to its new location.
3395         unsigned int new_pos = JvmtiTagHashmap::hash(new_oop, size);
3396         if (new_pos != (unsigned int)pos) {
3397           if (prev == NULL) {
3398             table[pos] = next;
3399           } else {
3400             prev->set_next(next);
3401           }
3402           if (new_pos < (unsigned int)pos) {
3403             entry->set_next(table[new_pos]);
3404             table[new_pos] = entry;
3405           } else {
3406             // Delay adding this entry to its new position as we'd end up
3407             // hitting it again during this iteration.
3408             entry->set_next(delayed_add);
3409             delayed_add = entry;
3410           }
3411           moved++;
3412         } else {
3413           // object didn't move
3414           prev = entry;
3415         }
3416       }
3417 
3418       entry = next;
3419     }
3420   }
3421 
3422   // Re-add all the entries which were kept aside
3423   while (delayed_add != NULL) {
3424     JvmtiTagHashmapEntry* next = delayed_add->next();
3425     unsigned int pos = JvmtiTagHashmap::hash(delayed_add->object_peek(), size);
3426     delayed_add->set_next(table[pos]);
3427     table[pos] = delayed_add;
3428     delayed_add = next;
3429   }
3430 
3431   log_debug(jvmti, objecttagging)("(%d->%d, %d freed, %d total moves)",
3432                                   hashmap->_entry_count + freed, hashmap->_entry_count, freed, moved);
3433 }