1 /*
   2  * Copyright (c) 2003, 2009, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 # include "incls/_precompiled.incl"
  26 # include "incls/_jvmtiTagMap.cpp.incl"
  27 
  28 // JvmtiTagHashmapEntry
  29 //
  30 // Each entry encapsulates a JNI weak reference to the tagged object
  31 // and the tag value. In addition an entry includes a next pointer which
  32 // is used to chain entries together.
  33 
  34 class JvmtiTagHashmapEntry : public CHeapObj {
  35  private:
  36   friend class JvmtiTagMap;
  37 
  38   jweak _object;                        // JNI weak ref to tagged object
  39   jlong _tag;                           // the tag
  40   JvmtiTagHashmapEntry* _next;          // next on the list
  41 
  42   inline void init(jweak object, jlong tag) {
  43     _object = object;
  44     _tag = tag;
  45     _next = NULL;
  46   }
  47 
  48   // constructor
  49   JvmtiTagHashmapEntry(jweak object, jlong tag)         { init(object, tag); }
  50 
  51  public:
  52 
  53   // accessor methods
  54   inline jweak object() const                           { return _object; }
  55   inline jlong tag() const                              { return _tag; }
  56 
  57   inline void set_tag(jlong tag) {
  58     assert(tag != 0, "can't be zero");
  59     _tag = tag;
  60   }
  61 
  62   inline JvmtiTagHashmapEntry* next() const             { return _next; }
  63   inline void set_next(JvmtiTagHashmapEntry* next)      { _next = next; }
  64 };
  65 
  66 
  67 // JvmtiTagHashmap
  68 //
  69 // A hashmap is essentially a table of pointers to entries. Entries
  70 // are hashed to a location, or position in the table, and then
// chained from that location. The "key" for hashing is the address of
  72 // the object, or oop. The "value" is the JNI weak reference to the
  73 // object and the tag value. Keys are not stored with the entry.
  74 // Instead the weak reference is resolved to obtain the key.
  75 //
// A hashmap maintains a count of the number of entries in the hashmap
// and resizes if the number of entries exceeds a given threshold.
// The threshold is specified as a load factor, a multiple of the table
// size - for example a load factor of 0.75 will trigger the hashmap to
// resize once the number of entries exceeds 75% of the table size
// (see the worked example at the end of this comment).
  81 //
  82 // A hashmap provides functions for adding, removing, and finding
  83 // entries. It also provides a function to iterate over all entries
  84 // in the hashmap.
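//
// As a worked example of the resize threshold (using only the defaults that
// appear in this file): the initial table size is 4801 and the default load
// factor is 4.0f, so the initial resize threshold is (int)(4.0 * 4801) =
// 19204 - the table grows to the next size once it holds more than 19204
// entries.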
  85 
  86 class JvmtiTagHashmap : public CHeapObj {
  87  private:
  88   friend class JvmtiTagMap;
  89 
  90   enum {
  91     small_trace_threshold  = 10000,                  // threshold for tracing
  92     medium_trace_threshold = 100000,
  93     large_trace_threshold  = 1000000,
  94     initial_trace_threshold = small_trace_threshold
  95   };
  96 
  97   static int _sizes[];                  // array of possible hashmap sizes
  98   int _size;                            // actual size of the table
  99   int _size_index;                      // index into size table
 100 
 101   int _entry_count;                     // number of entries in the hashmap
 102 
  float _load_factor;                   // load factor (resize threshold as a multiple of the size)
 104   int _resize_threshold;                // computed threshold to trigger resizing.
 105   bool _resizing_enabled;               // indicates if hashmap can resize
 106 
 107   int _trace_threshold;                 // threshold for trace messages
 108 
 109   JvmtiTagHashmapEntry** _table;        // the table of entries.
 110 
 111   // private accessors
 112   int resize_threshold() const                  { return _resize_threshold; }
 113   int trace_threshold() const                   { return _trace_threshold; }
 114 
 115   // initialize the hashmap
 116   void init(int size_index=0, float load_factor=4.0f) {
 117     int initial_size =  _sizes[size_index];
 118     _size_index = size_index;
 119     _size = initial_size;
 120     _entry_count = 0;
 121     if (TraceJVMTIObjectTagging) {
 122       _trace_threshold = initial_trace_threshold;
 123     } else {
 124       _trace_threshold = -1;
 125     }
 126     _load_factor = load_factor;
 127     _resize_threshold = (int)(_load_factor * _size);
 128     _resizing_enabled = true;
 129     size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
 130     _table = (JvmtiTagHashmapEntry**)os::malloc(s);
 131     if (_table == NULL) {
 132       vm_exit_out_of_memory(s, "unable to allocate initial hashtable for jvmti object tags");
 133     }
 134     for (int i=0; i<initial_size; i++) {
 135       _table[i] = NULL;
 136     }
 137   }
 138 
 139   // hash a given key (oop) with the specified size
 140   static unsigned int hash(oop key, int size) {
 141     // shift right to get better distribution (as these bits will be zero
 142     // with aligned addresses)
 143     unsigned int addr = (unsigned int)((intptr_t)key);
 144 #ifdef _LP64
 145     return (addr >> 3) % size;
 146 #else
 147     return (addr >> 2) % size;
 148 #endif
 149   }
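
  // A small worked example of the hash above (illustrative addresses only):
  // on LP64, an oop at address 0x1000 maps to bucket (0x1000 >> 3) % 4801 =
  // 512 for the initial table size of 4801, and an oop 8 bytes later at
  // 0x1008 maps to bucket 513 - the shift discards the always-zero alignment
  // bits so that neighbouring objects spread over neighbouring buckets.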
 150 
 151   // hash a given key (oop)
 152   unsigned int hash(oop key) {
 153     return hash(key, _size);
 154   }
 155 
  // resize the hashmap - allocates a larger table and re-hashes
 157   // all entries into the new table.
 158   void resize() {
 159     int new_size_index = _size_index+1;
 160     int new_size = _sizes[new_size_index];
 161     if (new_size < 0) {
 162       // hashmap already at maximum capacity
 163       return;
 164     }
 165 
 166     // allocate new table
 167     size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
 168     JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s);
 169     if (new_table == NULL) {
 170       warning("unable to allocate larger hashtable for jvmti object tags");
 171       set_resizing_enabled(false);
 172       return;
 173     }
 174 
 175     // initialize new table
 176     int i;
 177     for (i=0; i<new_size; i++) {
 178       new_table[i] = NULL;
 179     }
 180 
 181     // rehash all entries into the new table
 182     for (i=0; i<_size; i++) {
 183       JvmtiTagHashmapEntry* entry = _table[i];
 184       while (entry != NULL) {
 185         JvmtiTagHashmapEntry* next = entry->next();
 186         oop key = JNIHandles::resolve(entry->object());
 187         assert(key != NULL, "jni weak reference cleared!!");
 188         unsigned int h = hash(key, new_size);
 189         JvmtiTagHashmapEntry* anchor = new_table[h];
 190         if (anchor == NULL) {
 191           new_table[h] = entry;
 192           entry->set_next(NULL);
 193         } else {
 194           entry->set_next(anchor);
 195           new_table[h] = entry;
 196         }
 197         entry = next;
 198       }
 199     }
 200 
 201     // free old table and update settings.
 202     os::free((void*)_table);
 203     _table = new_table;
 204     _size_index = new_size_index;
 205     _size = new_size;
 206 
 207     // compute new resize threshold
 208     _resize_threshold = (int)(_load_factor * _size);
 209   }
 210 
 211 
 212   // internal remove function - remove an entry at a given position in the
 213   // table.
 214   inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
 215     assert(pos >= 0 && pos < _size, "out of range");
 216     if (prev == NULL) {
 217       _table[pos] = entry->next();
 218     } else {
 219       prev->set_next(entry->next());
 220     }
 221     assert(_entry_count > 0, "checking");
 222     _entry_count--;
 223   }
 224 
 225   // resizing switch
 226   bool is_resizing_enabled() const          { return _resizing_enabled; }
 227   void set_resizing_enabled(bool enable)    { _resizing_enabled = enable; }
 228 
 229   // debugging
 230   void print_memory_usage();
 231   void compute_next_trace_threshold();
 232 
 233  public:
 234 
  // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
  // The preferred size is rounded up to the nearest actual size (and capped
  // at the largest size).
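  // For example (illustrative values only, using the _sizes table below): a
  // preferred size of 5000 selects 76831, the first actual size >= 5000,
  // while a preferred size larger than the biggest entry selects the biggest
  // entry.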
 237   JvmtiTagHashmap(int size, float load_factor=0.0f) {
 238     int i=0;
 239     while (_sizes[i] < size) {
 240       if (_sizes[i] < 0) {
 241         assert(i > 0, "sanity check");
 242         i--;
 243         break;
 244       }
 245       i++;
 246     }
 247 
 248     // if a load factor is specified then use it, otherwise use default
 249     if (load_factor > 0.01f) {
 250       init(i, load_factor);
 251     } else {
 252       init(i);
 253     }
 254   }
 255 
 256   // create a JvmtiTagHashmap with default settings
 257   JvmtiTagHashmap() {
 258     init();
 259   }
 260 
  // release the table when the JvmtiTagHashmap is destroyed
 262   ~JvmtiTagHashmap() {
 263     if (_table != NULL) {
 264       os::free((void*)_table);
 265       _table = NULL;
 266     }
 267   }
 268 
 269   // accessors
 270   int size() const                              { return _size; }
 271   JvmtiTagHashmapEntry** table() const          { return _table; }
 272   int entry_count() const                       { return _entry_count; }
 273 
 274   // find an entry in the hashmap, returns NULL if not found.
 275   inline JvmtiTagHashmapEntry* find(oop key) {
 276     unsigned int h = hash(key);
 277     JvmtiTagHashmapEntry* entry = _table[h];
 278     while (entry != NULL) {
 279       oop orig_key = JNIHandles::resolve(entry->object());
 280       assert(orig_key != NULL, "jni weak reference cleared!!");
 281       if (key == orig_key) {
 282         break;
 283       }
 284       entry = entry->next();
 285     }
 286     return entry;
 287   }
 288 
 289 
 290   // add a new entry to hashmap
 291   inline void add(oop key, JvmtiTagHashmapEntry* entry) {
 292     assert(key != NULL, "checking");
 293     assert(find(key) == NULL, "duplicate detected");
 294     unsigned int h = hash(key);
 295     JvmtiTagHashmapEntry* anchor = _table[h];
 296     if (anchor == NULL) {
 297       _table[h] = entry;
 298       entry->set_next(NULL);
 299     } else {
 300       entry->set_next(anchor);
 301       _table[h] = entry;
 302     }
 303 
 304     _entry_count++;
 305     if (trace_threshold() > 0 && entry_count() >= trace_threshold()) {
 306       assert(TraceJVMTIObjectTagging, "should only get here when tracing");
 307       print_memory_usage();
 308       compute_next_trace_threshold();
 309     }
 310 
    // if the number of entries exceeds the threshold then resize
 312     if (entry_count() > resize_threshold() && is_resizing_enabled()) {
 313       resize();
 314     }
 315   }
 316 
 317   // remove an entry with the given key.
 318   inline JvmtiTagHashmapEntry* remove(oop key) {
 319     unsigned int h = hash(key);
 320     JvmtiTagHashmapEntry* entry = _table[h];
 321     JvmtiTagHashmapEntry* prev = NULL;
 322     while (entry != NULL) {
 323       oop orig_key = JNIHandles::resolve(entry->object());
 324       assert(orig_key != NULL, "jni weak reference cleared!!");
 325       if (key == orig_key) {
 326         break;
 327       }
 328       prev = entry;
 329       entry = entry->next();
 330     }
 331     if (entry != NULL) {
 332       remove(prev, h, entry);
 333     }
 334     return entry;
 335   }
 336 
 337   // iterate over all entries in the hashmap
 338   void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
 339 };
 340 
// possible hashmap sizes - odd primes that roughly double in size.
// To avoid excessive resizing the odd primes between 4801 and 76831 and
// between 76831 and 307261 have been removed. The list must be terminated by -1.
 344 int JvmtiTagHashmap::_sizes[] =  { 4801, 76831, 307261, 614563, 1228891,
 345     2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 };
 346 
 347 
// A supporting class for iterating over all entries in the hashmap
 349 class JvmtiTagHashmapEntryClosure {
 350  public:
 351   virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0;
 352 };
 353 
 354 
 355 // iterate over all entries in the hashmap
 356 void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
 357   for (int i=0; i<_size; i++) {
 358     JvmtiTagHashmapEntry* entry = _table[i];
 359     JvmtiTagHashmapEntry* prev = NULL;
 360     while (entry != NULL) {
 361       // obtain the next entry before invoking do_entry - this is
 362       // necessary because do_entry may remove the entry from the
 363       // hashmap.
 364       JvmtiTagHashmapEntry* next = entry->next();
 365       closure->do_entry(entry);
 366       entry = next;
 367      }
 368   }
 369 }
 370 
 371 // debugging
 372 void JvmtiTagHashmap::print_memory_usage() {
 373   intptr_t p = (intptr_t)this;
 374   tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p);
 375 
 376   // table + entries in KB
 377   int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) +
 378     entry_count()*sizeof(JvmtiTagHashmapEntry))/K;
 379 
 380   int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K);
 381   tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]",
 382     entry_count(), hashmap_usage, weak_globals_usage);
 383 }
 384 
 385 // compute threshold for the next trace message
 386 void JvmtiTagHashmap::compute_next_trace_threshold() {
 387   if (trace_threshold() < medium_trace_threshold) {
 388     _trace_threshold += small_trace_threshold;
 389   } else {
 390     if (trace_threshold() < large_trace_threshold) {
 391       _trace_threshold += medium_trace_threshold;
 392     } else {
 393       _trace_threshold += large_trace_threshold;
 394     }
 395   }
 396 }
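
// With the thresholds above, trace messages are therefore printed when the
// entry count reaches 10000, 20000, ..., 100000, then 200000, 300000, ...,
// 1000000, and from there on after each further million entries.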
 397 
 398 // memory region for young generation
 399 MemRegion JvmtiTagMap::_young_gen;
 400 
 401 // get the memory region used for the young generation
 402 void JvmtiTagMap::get_young_generation() {
 403   CollectedHeap* ch = Universe::heap();
 404   switch (ch->kind()) {
 405     case (CollectedHeap::GenCollectedHeap): {
 406       _young_gen = ((GenCollectedHeap*)ch)->get_gen(0)->reserved();
 407       break;
 408     }
 409 #ifndef SERIALGC
 410     case (CollectedHeap::ParallelScavengeHeap): {
 411       _young_gen = ((ParallelScavengeHeap*)ch)->young_gen()->reserved();
 412       break;
 413     }
 414     case (CollectedHeap::G1CollectedHeap): {
 415       // Until a more satisfactory solution is implemented, all
 416       // oops in the tag map will require rehash at each gc.
 417       // This is a correct, if extremely inefficient solution.
 418       // See RFE 6621729 for related commentary.
 419       _young_gen = ch->reserved_region();
 420       break;
 421     }
 422 #endif  // !SERIALGC
 423     default:
 424       ShouldNotReachHere();
 425   }
 426 }
 427 
 428 // returns true if oop is in the young generation
 429 inline bool JvmtiTagMap::is_in_young(oop o) {
 430   assert(_young_gen.start() != NULL, "checking");
 431   void* p = (void*)o;
 432   bool in_young = _young_gen.contains(p);
 433   return in_young;
 434 }
 435 
 436 // returns the appropriate hashmap for a given object
 437 inline JvmtiTagHashmap* JvmtiTagMap::hashmap_for(oop o) {
 438   if (is_in_young(o)) {
 439     return _hashmap[0];
 440   } else {
 441     return _hashmap[1];
 442   }
 443 }
 444 
 445 
 446 // create a JvmtiTagMap
 447 JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
 448   _env(env),
 449   _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false),
 450   _free_entries(NULL),
 451   _free_entries_count(0)
 452 {
 453   assert(JvmtiThreadState_lock->is_locked(), "sanity check");
 454   assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");
 455 
 456   // create the hashmaps
 457   for (int i=0; i<n_hashmaps; i++) {
 458     _hashmap[i] = new JvmtiTagHashmap();
 459   }
 460 
 461   // get the memory region used by the young generation
 462   get_young_generation();
 463 
 464   // finally add us to the environment
 465   ((JvmtiEnvBase *)env)->set_tag_map(this);
 466 }
 467 
 468 
 469 // destroy a JvmtiTagMap
 470 JvmtiTagMap::~JvmtiTagMap() {
 471 
 472   // no lock acquired as we assume the enclosing environment is
  // also being destroyed.
 474   ((JvmtiEnvBase *)_env)->set_tag_map(NULL);
 475 
 476   // iterate over the hashmaps and destroy each of the entries
 477   for (int i=0; i<n_hashmaps; i++) {
 478     JvmtiTagHashmap* hashmap = _hashmap[i];
 479     JvmtiTagHashmapEntry** table = hashmap->table();
 480     for (int j=0; j<hashmap->size(); j++) {
 481       JvmtiTagHashmapEntry *entry = table[j];
 482       while (entry != NULL) {
 483         JvmtiTagHashmapEntry* next = entry->next();
 484         jweak ref = entry->object();
 485         JNIHandles::destroy_weak_global(ref);
 486         delete entry;
 487         entry = next;
 488       }
 489     }
 490 
 491     // finally destroy the hashmap
 492     delete hashmap;
 493   }
 494 
 495   // remove any entries on the free list
 496   JvmtiTagHashmapEntry* entry = _free_entries;
 497   while (entry != NULL) {
 498     JvmtiTagHashmapEntry* next = entry->next();
 499     delete entry;
 500     entry = next;
 501   }
 502 }
 503 
 504 // create a hashmap entry
// - if there's an entry on the (per-environment) free list then it
// is returned. Otherwise a new entry is allocated.
 507 JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(jweak ref, jlong tag) {
 508   assert(Thread::current()->is_VM_thread() || is_locked(), "checking");
 509   JvmtiTagHashmapEntry* entry;
 510   if (_free_entries == NULL) {
 511     entry = new JvmtiTagHashmapEntry(ref, tag);
 512   } else {
 513     assert(_free_entries_count > 0, "mismatched _free_entries_count");
 514     _free_entries_count--;
 515     entry = _free_entries;
 516     _free_entries = entry->next();
 517     entry->init(ref, tag);
 518   }
 519   return entry;
 520 }
 521 
// destroy an entry by returning it to the free list
// (or deleting it if the free list is already full)
 523 void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
 524   assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
 525   // limit the size of the free list
 526   if (_free_entries_count >= max_free_entries) {
 527     delete entry;
 528   } else {
 529     entry->set_next(_free_entries);
 530     _free_entries = entry;
 531     _free_entries_count++;
 532   }
 533 }
 534 
// returns the tag map for the given environment. If the tag map
 536 // doesn't exist then it is created.
 537 JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
 538   JvmtiTagMap* tag_map = ((JvmtiEnvBase *)env)->tag_map();
 539   if (tag_map == NULL) {
 540     MutexLocker mu(JvmtiThreadState_lock);
 541     tag_map = ((JvmtiEnvBase *)env)->tag_map();
 542     if (tag_map == NULL) {
 543       tag_map = new JvmtiTagMap(env);
 544     }
 545   } else {
 546     CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
 547   }
 548   return tag_map;
 549 }
 550 
 551 // iterate over all entries in the tag map.
 552 void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
 553   for (int i=0; i<n_hashmaps; i++) {
 554     JvmtiTagHashmap* hashmap = _hashmap[i];
 555     hashmap->entry_iterate(closure);
 556   }
 557 }
 558 
 559 // returns true if the hashmaps are empty
 560 bool JvmtiTagMap::is_empty() {
 561   assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
 562   assert(n_hashmaps == 2, "not implemented");
 563   return ((_hashmap[0]->entry_count() == 0) && (_hashmap[1]->entry_count() == 0));
 564 }
 565 
 566 
 567 // Return the tag value for an object, or 0 if the object is
 568 // not tagged
 569 //
 570 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
 571   JvmtiTagHashmapEntry* entry = tag_map->hashmap_for(o)->find(o);
 572   if (entry == NULL) {
 573     return 0;
 574   } else {
 575     return entry->tag();
 576   }
 577 }
 578 
 579 // If the object is a java.lang.Class then return the klassOop,
 580 // otherwise return the original object
 581 static inline oop klassOop_if_java_lang_Class(oop o) {
 582   if (o->klass() == SystemDictionary::Class_klass()) {
 583     if (!java_lang_Class::is_primitive(o)) {
 584       o = (oop)java_lang_Class::as_klassOop(o);
 585       assert(o != NULL, "class for non-primitive mirror must exist");
 586     }
 587   }
 588   return o;
 589 }
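
// For example: passed the mirror for java.lang.String this returns the
// klassOop for String; passed a primitive mirror such as the one for int, or
// any object that is not a java.lang.Class instance, it returns the oop
// unchanged.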
 590 
 591 // A CallbackWrapper is a support class for querying and tagging an object
 592 // around a callback to a profiler. The constructor does pre-callback
 593 // work to get the tag value, klass tag value, ... and the destructor
 594 // does the post-callback work of tagging or untagging the object.
 595 //
 596 // {
 597 //   CallbackWrapper wrapper(tag_map, o);
 598 //
 599 //   (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
 600 //
 601 // } // wrapper goes out of scope here which results in the destructor
 602 //      checking to see if the object has been tagged, untagged, or the
 603 //      tag value has changed.
 604 //
 605 class CallbackWrapper : public StackObj {
 606  private:
 607   JvmtiTagMap* _tag_map;
 608   JvmtiTagHashmap* _hashmap;
 609   JvmtiTagHashmapEntry* _entry;
 610   oop _o;
 611   jlong _obj_size;
 612   jlong _obj_tag;
 613   klassOop _klass;         // the object's class
 614   jlong _klass_tag;
 615 
 616  protected:
 617   JvmtiTagMap* tag_map() const      { return _tag_map; }
 618 
 619   // invoked post-callback to tag, untag, or update the tag of an object
 620   void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
 621                                        JvmtiTagHashmapEntry* entry, jlong obj_tag);
 622  public:
 623   CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
 624     assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
 625            "MT unsafe or must be VM thread");
 626 
 627     // for Classes the klassOop is tagged
 628     _o = klassOop_if_java_lang_Class(o);
 629 
 630     // object size
 631     _obj_size = _o->size() * wordSize;
 632 
 633     // record the context
 634     _tag_map = tag_map;
 635     _hashmap = tag_map->hashmap_for(_o);
 636     _entry = _hashmap->find(_o);
 637 
 638     // get object tag
 639     _obj_tag = (_entry == NULL) ? 0 : _entry->tag();
 640 
 641     // get the class and the class's tag value
 642     if (_o == o) {
 643       _klass = _o->klass();
 644     } else {
 645       // if the object represents a runtime class then use the
 646       // tag for java.lang.Class
 647       _klass = SystemDictionary::Class_klass();
 648     }
 649     _klass_tag = tag_for(tag_map, _klass);
 650   }
 651 
 652   ~CallbackWrapper() {
 653     post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
 654   }
 655 
 656   inline jlong* obj_tag_p()                     { return &_obj_tag; }
 657   inline jlong obj_size() const                 { return _obj_size; }
 658   inline jlong obj_tag() const                  { return _obj_tag; }
 659   inline klassOop klass() const                 { return _klass; }
 660   inline jlong klass_tag() const                { return _klass_tag; }
 661 };
 662 
 663 
 664 
// invoked post-callback to tag, untag, or update the tag of an object
 666 void inline CallbackWrapper::post_callback_tag_update(oop o,
 667                                                       JvmtiTagHashmap* hashmap,
 668                                                       JvmtiTagHashmapEntry* entry,
 669                                                       jlong obj_tag) {
 670   if (entry == NULL) {
 671     if (obj_tag != 0) {
 672       // callback has tagged the object
 673       assert(Thread::current()->is_VM_thread(), "must be VMThread");
 674       HandleMark hm;
 675       Handle h(o);
 676       jweak ref = JNIHandles::make_weak_global(h);
 677       entry = tag_map()->create_entry(ref, obj_tag);
 678       hashmap->add(o, entry);
 679     }
 680   } else {
 681     // object was previously tagged - the callback may have untagged
 682     // the object or changed the tag value
 683     if (obj_tag == 0) {
 684       jweak ref = entry->object();
 685 
 686       JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
 687       assert(entry_removed == entry, "checking");
 688       tag_map()->destroy_entry(entry);
 689 
 690       JNIHandles::destroy_weak_global(ref);
 691     } else {
 692       if (obj_tag != entry->tag()) {
 693          entry->set_tag(obj_tag);
 694       }
 695     }
 696   }
 697 }
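
// For example: if the agent's callback stores a non-zero value into
// *obj_tag_p for a previously untagged object, a JNI weak reference and a
// new entry are created above; if it stores 0 for a previously tagged
// object, the entry is removed and its weak reference destroyed; any other
// change simply updates the tag value in place.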
 698 
 699 // An extended CallbackWrapper used when reporting an object reference
 700 // to the agent.
 701 //
 702 // {
 703 //   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
 704 //
 705 //   (*callback)(wrapper.klass_tag(),
 706 //               wrapper.obj_size(),
//               wrapper.obj_tag_p(),
 708 //               wrapper.referrer_tag_p(), ...)
 709 //
 710 // } // wrapper goes out of scope here which results in the destructor
 711 //      checking to see if the referrer object has been tagged, untagged,
 712 //      or the tag value has changed.
 713 //
 714 class TwoOopCallbackWrapper : public CallbackWrapper {
 715  private:
 716   bool _is_reference_to_self;
 717   JvmtiTagHashmap* _referrer_hashmap;
 718   JvmtiTagHashmapEntry* _referrer_entry;
 719   oop _referrer;
 720   jlong _referrer_obj_tag;
 721   jlong _referrer_klass_tag;
 722   jlong* _referrer_tag_p;
 723 
 724   bool is_reference_to_self() const             { return _is_reference_to_self; }
 725 
 726  public:
 727   TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
 728     CallbackWrapper(tag_map, o)
 729   {
 730     // self reference needs to be handled in a special way
 731     _is_reference_to_self = (referrer == o);
 732 
 733     if (_is_reference_to_self) {
 734       _referrer_klass_tag = klass_tag();
 735       _referrer_tag_p = obj_tag_p();
 736     } else {
 737       // for Classes the klassOop is tagged
 738       _referrer = klassOop_if_java_lang_Class(referrer);
 739       // record the context
 740       _referrer_hashmap = tag_map->hashmap_for(_referrer);
 741       _referrer_entry = _referrer_hashmap->find(_referrer);
 742 
 743       // get object tag
 744       _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
 745       _referrer_tag_p = &_referrer_obj_tag;
 746 
 747       // get referrer class tag.
 748       klassOop k = (_referrer == referrer) ?  // Check if referrer is a class...
 749           _referrer->klass()                  // No, just get its class
 750          : SystemDictionary::Class_klass();   // Yes, its class is Class
 751       _referrer_klass_tag = tag_for(tag_map, k);
 752     }
 753   }
 754 
 755   ~TwoOopCallbackWrapper() {
 756     if (!is_reference_to_self()){
 757       post_callback_tag_update(_referrer,
 758                                _referrer_hashmap,
 759                                _referrer_entry,
 760                                _referrer_obj_tag);
 761     }
 762   }
 763 
 764   // address of referrer tag
 765   // (for a self reference this will return the same thing as obj_tag_p())
 766   inline jlong* referrer_tag_p()        { return _referrer_tag_p; }
 767 
 768   // referrer's class tag
 769   inline jlong referrer_klass_tag()     { return _referrer_klass_tag; }
 770 };
 771 
 772 // tag an object
 773 //
 774 // This function is performance critical. If many threads attempt to tag objects
 775 // around the same time then it's possible that the Mutex associated with the
 776 // tag map will be a hot lock. Eliminating this lock will not eliminate the issue
 777 // because creating a JNI weak reference requires acquiring a global lock also.
 778 void JvmtiTagMap::set_tag(jobject object, jlong tag) {
 779   MutexLocker ml(lock());
 780 
 781   // resolve the object
 782   oop o = JNIHandles::resolve_non_null(object);
 783 
 784   // for Classes we tag the klassOop
 785   o = klassOop_if_java_lang_Class(o);
 786 
 787   // see if the object is already tagged
 788   JvmtiTagHashmap* hashmap = hashmap_for(o);
 789   JvmtiTagHashmapEntry* entry = hashmap->find(o);
 790 
 791   // if the object is not already tagged then we tag it
 792   if (entry == NULL) {
 793     if (tag != 0) {
 794       HandleMark hm;
 795       Handle h(o);
 796       jweak ref = JNIHandles::make_weak_global(h);
 797 
 798       // the object may have moved because make_weak_global may
      // have blocked - thus it is necessary to resolve the handle
 800       // and re-hash the object.
 801       o = h();
 802       entry = create_entry(ref, tag);
 803       hashmap_for(o)->add(o, entry);
 804     } else {
 805       // no-op
 806     }
 807   } else {
 808     // if the object is already tagged then we either update
 809     // the tag (if a new tag value has been provided)
 810     // or remove the object if the new tag value is 0.
 811     // Removing the object requires that we also delete the JNI
 812     // weak ref to the object.
 813     if (tag == 0) {
 814       jweak ref = entry->object();
 815       hashmap->remove(o);
 816       destroy_entry(entry);
 817       JNIHandles::destroy_weak_global(ref);
 818     } else {
 819       entry->set_tag(tag);
 820     }
 821   }
 822 }
 823 
 824 // get the tag for an object
 825 jlong JvmtiTagMap::get_tag(jobject object) {
 826   MutexLocker ml(lock());
 827 
 828   // resolve the object
 829   oop o = JNIHandles::resolve_non_null(object);
 830 
 831   // for Classes get the tag from the klassOop
 832   return tag_for(this, klassOop_if_java_lang_Class(o));
 833 }
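
// A minimal agent-side sketch of the two entry points above (a sketch only:
// it assumes `jvmti` is a jvmtiEnv* obtained via JavaVM::GetEnv and `obj` is
// a valid JNI reference; error handling is omitted):
//
//   jvmti->SetTag(obj, 42);      // reaches JvmtiTagMap::set_tag()
//   jlong tag = 0;
//   jvmti->GetTag(obj, &tag);    // reaches JvmtiTagMap::get_tag(); tag == 42
//   jvmti->SetTag(obj, 0);       // untags obj and releases its weak ref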
 834 
 835 
 836 // Helper class used to describe the static or instance fields of a class.
 837 // For each field it holds the field index (as defined by the JVMTI specification),
 838 // the field type, and the offset.
 839 
 840 class ClassFieldDescriptor: public CHeapObj {
 841  private:
 842   int _field_index;
 843   int _field_offset;
 844   char _field_type;
 845  public:
 846   ClassFieldDescriptor(int index, char type, int offset) :
 847     _field_index(index), _field_type(type), _field_offset(offset) {
 848   }
 849   int field_index()  const  { return _field_index; }
 850   char field_type()  const  { return _field_type; }
 851   int field_offset() const  { return _field_offset; }
 852 };
 853 
 854 class ClassFieldMap: public CHeapObj {
 855  private:
 856   enum {
 857     initial_field_count = 5
 858   };
 859 
 860   // list of field descriptors
 861   GrowableArray<ClassFieldDescriptor*>* _fields;
 862 
 863   // constructor
 864   ClassFieldMap();
 865 
 866   // add a field
 867   void add(int index, char type, int offset);
 868 
 869   // returns the field count for the given class
 870   static int compute_field_count(instanceKlassHandle ikh);
 871 
 872  public:
 873   ~ClassFieldMap();
 874 
 875   // access
 876   int field_count()                     { return _fields->length(); }
 877   ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }
 878 
 879   // functions to create maps of static or instance fields
 880   static ClassFieldMap* create_map_of_static_fields(klassOop k);
 881   static ClassFieldMap* create_map_of_instance_fields(oop obj);
 882 };
 883 
 884 ClassFieldMap::ClassFieldMap() {
 885   _fields = new (ResourceObj::C_HEAP) GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
 886 }
 887 
 888 ClassFieldMap::~ClassFieldMap() {
 889   for (int i=0; i<_fields->length(); i++) {
 890     delete _fields->at(i);
 891   }
 892   delete _fields;
 893 }
 894 
 895 void ClassFieldMap::add(int index, char type, int offset) {
 896   ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
 897   _fields->append(field);
 898 }
 899 
 900 // Returns a heap allocated ClassFieldMap to describe the static fields
 901 // of the given class.
 902 //
 903 ClassFieldMap* ClassFieldMap::create_map_of_static_fields(klassOop k) {
 904   HandleMark hm;
 905   instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), k);
 906 
 907   // create the field map
 908   ClassFieldMap* field_map = new ClassFieldMap();
 909 
 910   FilteredFieldStream f(ikh, false, false);
 911   int max_field_index = f.field_count()-1;
 912 
 913   int index = 0;
 914   for (FilteredFieldStream fld(ikh, true, true); !fld.eos(); fld.next(), index++) {
 915     // ignore instance fields
 916     if (!fld.access_flags().is_static()) {
 917       continue;
 918     }
 919     field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
 920   }
 921   return field_map;
 922 }
 923 
 924 // Returns a heap allocated ClassFieldMap to describe the instance fields
 925 // of the given class. All instance fields are included (this means public
 926 // and private fields declared in superclasses and superinterfaces too).
 927 //
 928 ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
 929   HandleMark hm;
 930   instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), obj->klass());
 931 
 932   // create the field map
 933   ClassFieldMap* field_map = new ClassFieldMap();
 934 
 935   FilteredFieldStream f(ikh, false, false);
 936 
 937   int max_field_index = f.field_count()-1;
 938 
 939   int index = 0;
 940   for (FilteredFieldStream fld(ikh, false, false); !fld.eos(); fld.next(), index++) {
 941     // ignore static fields
 942     if (fld.access_flags().is_static()) {
 943       continue;
 944     }
 945     field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
 946   }
 947 
 948   return field_map;
 949 }
 950 
// Helper class used to cache a ClassFieldMap for the instance fields of
// a class. A JvmtiCachedClassFieldMap can be cached by an instanceKlass during
 953 // heap iteration and avoid creating a field map for each object in the heap
 954 // (only need to create the map when the first instance of a class is encountered).
 955 //
 956 class JvmtiCachedClassFieldMap : public CHeapObj {
 957  private:
 958    enum {
 959      initial_class_count = 200
 960    };
 961   ClassFieldMap* _field_map;
 962 
 963   ClassFieldMap* field_map() const          { return _field_map; }
 964 
 965   JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
 966   ~JvmtiCachedClassFieldMap();
 967 
 968   static GrowableArray<instanceKlass*>* _class_list;
 969   static void add_to_class_list(instanceKlass* ik);
 970 
 971  public:
  // returns the field map for a given object (returning the map cached
  // by the instanceKlass if possible)
 974   static ClassFieldMap* get_map_of_instance_fields(oop obj);
 975 
 976   // removes the field map from all instanceKlasses - should be
 977   // called before VM operation completes
 978   static void clear_cache();
 979 
 980   // returns the number of ClassFieldMap cached by instanceKlasses
 981   static int cached_field_map_count();
 982 };
 983 
 984 GrowableArray<instanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
 985 
 986 JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
 987   _field_map = field_map;
 988 }
 989 
 990 JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
 991   if (_field_map != NULL) {
 992     delete _field_map;
 993   }
 994 }
 995 
// Marker class to ensure that the class field map cache is only used in a defined
 997 // scope.
 998 class ClassFieldMapCacheMark : public StackObj {
 999  private:
1000    static bool _is_active;
1001  public:
1002    ClassFieldMapCacheMark() {
1003      assert(Thread::current()->is_VM_thread(), "must be VMThread");
1004      assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
1005      assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
1006      _is_active = true;
1007    }
1008    ~ClassFieldMapCacheMark() {
1009      JvmtiCachedClassFieldMap::clear_cache();
1010      _is_active = false;
1011    }
1012    static bool is_active() { return _is_active; }
1013 };
1014 
1015 bool ClassFieldMapCacheMark::_is_active;
1016 
1017 
1018 // record that the given instanceKlass is caching a field map
1019 void JvmtiCachedClassFieldMap::add_to_class_list(instanceKlass* ik) {
1020   if (_class_list == NULL) {
1021     _class_list = new (ResourceObj::C_HEAP) GrowableArray<instanceKlass*>(initial_class_count, true);
1022   }
1023   _class_list->push(ik);
1024 }
1025 
1026 // returns the instance field map for the given object
1027 // (returns field map cached by the instanceKlass if possible)
1028 ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
1029   assert(Thread::current()->is_VM_thread(), "must be VMThread");
1030   assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");
1031 
1032   klassOop k = obj->klass();
1033   instanceKlass* ik = instanceKlass::cast(k);
1034 
1035   // return cached map if possible
1036   JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
1037   if (cached_map != NULL) {
1038     assert(cached_map->field_map() != NULL, "missing field list");
1039     return cached_map->field_map();
1040   } else {
1041     ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
1042     cached_map = new JvmtiCachedClassFieldMap(field_map);
1043     ik->set_jvmti_cached_class_field_map(cached_map);
1044     add_to_class_list(ik);
1045     return field_map;
1046   }
1047 }
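
// For example, if a heap iteration visits 100000 instances of the same class
// then the field map is computed once, for the first instance, and the
// cached copy is returned for the remaining 99999 (the cache is dropped
// again by clear_cache() when the VM operation completes).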
1048 
// remove the field maps cached by all instanceKlasses
1050 void JvmtiCachedClassFieldMap::clear_cache() {
1051   assert(Thread::current()->is_VM_thread(), "must be VMThread");
1052   if (_class_list != NULL) {
1053     for (int i = 0; i < _class_list->length(); i++) {
1054       instanceKlass* ik = _class_list->at(i);
1055       JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
1056       assert(cached_map != NULL, "should not be NULL");
1057       ik->set_jvmti_cached_class_field_map(NULL);
1058       delete cached_map;  // deletes the encapsulated field map
1059     }
1060     delete _class_list;
1061     _class_list = NULL;
1062   }
1063 }
1064 
1065 // returns the number of ClassFieldMap cached by instanceKlasses
1066 int JvmtiCachedClassFieldMap::cached_field_map_count() {
1067   return (_class_list == NULL) ? 0 : _class_list->length();
1068 }
1069 
1070 // helper function to indicate if an object is filtered by its tag or class tag
1071 static inline bool is_filtered_by_heap_filter(jlong obj_tag,
1072                                               jlong klass_tag,
1073                                               int heap_filter) {
1074   // apply the heap filter
1075   if (obj_tag != 0) {
1076     // filter out tagged objects
1077     if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
1078   } else {
1079     // filter out untagged objects
1080     if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
1081   }
1082   if (klass_tag != 0) {
1083     // filter out objects with tagged classes
1084     if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
1085   } else {
1086     // filter out objects with untagged classes.
1087     if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
1088   }
1089   return false;
1090 }
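
// For example, with heap_filter == (JVMTI_HEAP_FILTER_UNTAGGED |
// JVMTI_HEAP_FILTER_CLASS_UNTAGGED) only objects that are themselves tagged
// and whose class is also tagged are reported; everything else is filtered
// out by the checks above.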
1091 
1092 // helper function to indicate if an object is filtered by a klass filter
1093 static inline bool is_filtered_by_klass_filter(oop obj, KlassHandle klass_filter) {
1094   if (!klass_filter.is_null()) {
1095     if (obj->klass() != klass_filter()) {
1096       return true;
1097     }
1098   }
1099   return false;
1100 }
1101 
1102 // helper function to tell if a field is a primitive field or not
1103 static inline bool is_primitive_field_type(char type) {
1104   return (type != 'L' && type != '[');
1105 }
1106 
1107 // helper function to copy the value from location addr to jvalue.
1108 static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
1109   switch (value_type) {
1110     case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
1111     case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
1112     case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
1113     case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
1114     case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
1115     case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
1116     case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
1117     case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
1118     default: ShouldNotReachHere();
1119   }
1120 }
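
// The casts from a field type character to jvmtiPrimitiveType in the callers
// below rely on the JVMTI specification defining each enum constant as the
// value of its type signature character - for example
// JVMTI_PRIMITIVE_TYPE_INT == 'I' and JVMTI_PRIMITIVE_TYPE_BOOLEAN == 'Z'.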
1121 
1122 // helper function to invoke string primitive value callback
1123 // returns visit control flags
1124 static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
1125                                          CallbackWrapper* wrapper,
1126                                          oop str,
1127                                          void* user_data)
1128 {
1129   assert(str->klass() == SystemDictionary::String_klass(), "not a string");
1130 
1131   // get the string value and length
1132   // (string value may be offset from the base)
1133   int s_len = java_lang_String::length(str);
1134   typeArrayOop s_value = java_lang_String::value(str);
1135   int s_offset = java_lang_String::offset(str);
1136   jchar* value;
1137   if (s_len > 0) {
1138     value = s_value->char_at_addr(s_offset);
1139   } else {
1140     value = (jchar*) s_value->base(T_CHAR);
1141   }
1142 
1143   // invoke the callback
1144   return (*cb)(wrapper->klass_tag(),
1145                wrapper->obj_size(),
1146                wrapper->obj_tag_p(),
1147                value,
1148                (jint)s_len,
1149                user_data);
1150 }
1151 
// helper function to invoke array primitive value callback
1153 // returns visit control flags
1154 static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
1155                                                   CallbackWrapper* wrapper,
1156                                                   oop obj,
1157                                                   void* user_data)
1158 {
1159   assert(obj->is_typeArray(), "not a primitive array");
1160 
1161   // get base address of first element
1162   typeArrayOop array = typeArrayOop(obj);
1163   BasicType type = typeArrayKlass::cast(array->klass())->element_type();
1164   void* elements = array->base(type);
1165 
1166   // jvmtiPrimitiveType is defined so this mapping is always correct
1167   jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);
1168 
1169   return (*cb)(wrapper->klass_tag(),
1170                wrapper->obj_size(),
1171                wrapper->obj_tag_p(),
1172                (jint)array->length(),
1173                elem_type,
1174                elements,
1175                user_data);
1176 }
1177 
1178 // helper function to invoke the primitive field callback for all static fields
1179 // of a given class
1180 static jint invoke_primitive_field_callback_for_static_fields
1181   (CallbackWrapper* wrapper,
1182    oop obj,
1183    jvmtiPrimitiveFieldCallback cb,
1184    void* user_data)
1185 {
1186   // for static fields only the index will be set
1187   static jvmtiHeapReferenceInfo reference_info = { 0 };
1188 
1189   assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
1190   if (java_lang_Class::is_primitive(obj)) {
1191     return 0;
1192   }
1193   klassOop k = java_lang_Class::as_klassOop(obj);
1194   Klass* klass = k->klass_part();
1195 
1196   // ignore classes for object and type arrays
1197   if (!klass->oop_is_instance()) {
1198     return 0;
1199   }
1200 
1201   // ignore classes which aren't linked yet
1202   instanceKlass* ik = instanceKlass::cast(k);
1203   if (!ik->is_linked()) {
1204     return 0;
1205   }
1206 
1207   // get the field map
1208   ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(k);
1209 
1210   // invoke the callback for each static primitive field
1211   for (int i=0; i<field_map->field_count(); i++) {
1212     ClassFieldDescriptor* field = field_map->field_at(i);
1213 
1214     // ignore non-primitive fields
1215     char type = field->field_type();
1216     if (!is_primitive_field_type(type)) {
1217       continue;
1218     }
1219     // one-to-one mapping
1220     jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1221 
1222     // get offset and field value
1223     int offset = field->field_offset();
1224     address addr = (address)k + offset;
1225     jvalue value;
1226     copy_to_jvalue(&value, addr, value_type);
1227 
1228     // field index
1229     reference_info.field.index = field->field_index();
1230 
1231     // invoke the callback
1232     jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
1233                      &reference_info,
1234                      wrapper->klass_tag(),
1235                      wrapper->obj_tag_p(),
1236                      value,
1237                      value_type,
1238                      user_data);
1239     if (res & JVMTI_VISIT_ABORT) {
1240       delete field_map;
1241       return res;
1242     }
1243   }
1244 
1245   delete field_map;
1246   return 0;
1247 }
1248 
1249 // helper function to invoke the primitive field callback for all instance fields
1250 // of a given object
1251 static jint invoke_primitive_field_callback_for_instance_fields(
1252   CallbackWrapper* wrapper,
1253   oop obj,
1254   jvmtiPrimitiveFieldCallback cb,
1255   void* user_data)
1256 {
1257   // for instance fields only the index will be set
1258   static jvmtiHeapReferenceInfo reference_info = { 0 };
1259 
1260   // get the map of the instance fields
1261   ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);
1262 
1263   // invoke the callback for each instance primitive field
1264   for (int i=0; i<fields->field_count(); i++) {
1265     ClassFieldDescriptor* field = fields->field_at(i);
1266 
1267     // ignore non-primitive fields
1268     char type = field->field_type();
1269     if (!is_primitive_field_type(type)) {
1270       continue;
1271     }
1272     // one-to-one mapping
1273     jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1274 
1275     // get offset and field value
1276     int offset = field->field_offset();
1277     address addr = (address)obj + offset;
1278     jvalue value;
1279     copy_to_jvalue(&value, addr, value_type);
1280 
1281     // field index
1282     reference_info.field.index = field->field_index();
1283 
1284     // invoke the callback
1285     jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
1286                      &reference_info,
1287                      wrapper->klass_tag(),
1288                      wrapper->obj_tag_p(),
1289                      value,
1290                      value_type,
1291                      user_data);
1292     if (res & JVMTI_VISIT_ABORT) {
1293       return res;
1294     }
1295   }
1296   return 0;
1297 }
1298 
1299 
1300 // VM operation to iterate over all objects in the heap (both reachable
1301 // and unreachable)
1302 class VM_HeapIterateOperation: public VM_Operation {
1303  private:
1304   ObjectClosure* _blk;
1305  public:
1306   VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }
1307 
1308   VMOp_Type type() const { return VMOp_HeapIterateOperation; }
1309   void doit() {
    // allows class field maps to be cached during iteration
1311     ClassFieldMapCacheMark cm;
1312 
1313     // make sure that heap is parsable (fills TLABs with filler objects)
1314     Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
1315 
1316     // Verify heap before iteration - if the heap gets corrupted then
1317     // JVMTI's IterateOverHeap will crash.
1318     if (VerifyBeforeIteration) {
1319       Universe::verify();
1320     }
1321 
1322     // do the iteration
1323     // If this operation encounters a bad object when using CMS,
1324     // consider using safe_object_iterate() which avoids perm gen
1325     // objects that may contain bad references.
1326     Universe::heap()->object_iterate(_blk);
1327 
1328     // when sharing is enabled we must iterate over the shared spaces
1329     if (UseSharedSpaces) {
1330       GenCollectedHeap* gch = GenCollectedHeap::heap();
1331       CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
1332       gen->ro_space()->object_iterate(_blk);
1333       gen->rw_space()->object_iterate(_blk);
1334     }
1335   }
1336 
1337 };
1338 
1339 
1340 // An ObjectClosure used to support the deprecated IterateOverHeap and
1341 // IterateOverInstancesOfClass functions
1342 class IterateOverHeapObjectClosure: public ObjectClosure {
1343  private:
1344   JvmtiTagMap* _tag_map;
1345   KlassHandle _klass;
1346   jvmtiHeapObjectFilter _object_filter;
1347   jvmtiHeapObjectCallback _heap_object_callback;
1348   const void* _user_data;
1349 
1350   // accessors
1351   JvmtiTagMap* tag_map() const                    { return _tag_map; }
1352   jvmtiHeapObjectFilter object_filter() const     { return _object_filter; }
1353   jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
1354   KlassHandle klass() const                       { return _klass; }
1355   const void* user_data() const                   { return _user_data; }
1356 
1357   // indicates if iteration has been aborted
1358   bool _iteration_aborted;
1359   bool is_iteration_aborted() const               { return _iteration_aborted; }
1360   void set_iteration_aborted(bool aborted)        { _iteration_aborted = aborted; }
1361 
1362  public:
1363   IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
1364                                KlassHandle klass,
1365                                jvmtiHeapObjectFilter object_filter,
1366                                jvmtiHeapObjectCallback heap_object_callback,
1367                                const void* user_data) :
1368     _tag_map(tag_map),
1369     _klass(klass),
1370     _object_filter(object_filter),
1371     _heap_object_callback(heap_object_callback),
1372     _user_data(user_data),
1373     _iteration_aborted(false)
1374   {
1375   }
1376 
1377   void do_object(oop o);
1378 };
1379 
1380 // invoked for each object in the heap
1381 void IterateOverHeapObjectClosure::do_object(oop o) {
1382   // check if iteration has been halted
1383   if (is_iteration_aborted()) return;
1384 
1385   // ignore any objects that aren't visible to profiler
1386   if (!ServiceUtil::visible_oop(o)) return;
1387 
1388   // instanceof check when filtering by klass
1389   if (!klass().is_null() && !o->is_a(klass()())) {
1390     return;
1391   }
  // prepare for the callback
1393   CallbackWrapper wrapper(tag_map(), o);
1394 
1395   // if the object is tagged and we're only interested in untagged objects
  // then don't invoke the callback. Similarly, if the object is untagged
1397   // and we're only interested in tagged objects we skip the callback.
1398   if (wrapper.obj_tag() != 0) {
1399     if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
1400   } else {
1401     if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
1402   }
1403 
1404   // invoke the agent's callback
1405   jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
1406                                                        wrapper.obj_size(),
1407                                                        wrapper.obj_tag_p(),
1408                                                        (void*)user_data());
1409   if (control == JVMTI_ITERATION_ABORT) {
1410     set_iteration_aborted(true);
1411   }
1412 }
1413 
1414 // An ObjectClosure used to support the IterateThroughHeap function
1415 class IterateThroughHeapObjectClosure: public ObjectClosure {
1416  private:
1417   JvmtiTagMap* _tag_map;
1418   KlassHandle _klass;
1419   int _heap_filter;
1420   const jvmtiHeapCallbacks* _callbacks;
1421   const void* _user_data;
1422 
1423   // accessor functions
1424   JvmtiTagMap* tag_map() const                     { return _tag_map; }
1425   int heap_filter() const                          { return _heap_filter; }
1426   const jvmtiHeapCallbacks* callbacks() const      { return _callbacks; }
1427   KlassHandle klass() const                        { return _klass; }
1428   const void* user_data() const                    { return _user_data; }
1429 
1430   // indicates if the iteration has been aborted
1431   bool _iteration_aborted;
1432   bool is_iteration_aborted() const                { return _iteration_aborted; }
1433 
1434   // used to check the visit control flags. If the abort flag is set
1435   // then we set the iteration aborted flag so that the iteration completes
1436   // without processing any further objects
1437   bool check_flags_for_abort(jint flags) {
1438     bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
1439     if (is_abort) {
1440       _iteration_aborted = true;
1441     }
1442     return is_abort;
1443   }
1444 
1445  public:
1446   IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
1447                                   KlassHandle klass,
1448                                   int heap_filter,
1449                                   const jvmtiHeapCallbacks* heap_callbacks,
1450                                   const void* user_data) :
1451     _tag_map(tag_map),
1452     _klass(klass),
1453     _heap_filter(heap_filter),
1454     _callbacks(heap_callbacks),
1455     _user_data(user_data),
1456     _iteration_aborted(false)
1457   {
1458   }
1459 
1460   void do_object(oop o);
1461 };
1462 
1463 // invoked for each object in the heap
1464 void IterateThroughHeapObjectClosure::do_object(oop obj) {
1465   // check if iteration has been halted
1466   if (is_iteration_aborted()) return;
1467 
1468   // ignore any objects that aren't visible to profiler
1469   if (!ServiceUtil::visible_oop(obj)) return;
1470 
1471   // apply class filter
1472   if (is_filtered_by_klass_filter(obj, klass())) return;
1473 
1474   // prepare for callback
1475   CallbackWrapper wrapper(tag_map(), obj);
1476 
1477   // check if filtered by the heap filter
1478   if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
1479     return;
1480   }
1481 
1482   // for arrays we need the length, otherwise -1
1483   bool is_array = obj->is_array();
1484   int len = is_array ? arrayOop(obj)->length() : -1;
1485 
1486   // invoke the object callback (if callback is provided)
1487   if (callbacks()->heap_iteration_callback != NULL) {
1488     jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
1489     jint res = (*cb)(wrapper.klass_tag(),
1490                      wrapper.obj_size(),
1491                      wrapper.obj_tag_p(),
1492                      (jint)len,
1493                      (void*)user_data());
1494     if (check_flags_for_abort(res)) return;
1495   }
1496 
1497   // for objects and classes we report primitive fields if callback provided
1498   if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
1499     jint res;
1500     jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
1501     if (obj->klass() == SystemDictionary::Class_klass()) {
1502       res = invoke_primitive_field_callback_for_static_fields(&wrapper,
1503                                                                     obj,
1504                                                                     cb,
1505                                                                     (void*)user_data());
1506     } else {
1507       res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
1508                                                                       obj,
1509                                                                       cb,
1510                                                                       (void*)user_data());
1511     }
1512     if (check_flags_for_abort(res)) return;
1513   }
1514 
1515   // string callback
1516   if (!is_array &&
1517       callbacks()->string_primitive_value_callback != NULL &&
1518       obj->klass() == SystemDictionary::String_klass()) {
1519     jint res = invoke_string_value_callback(
1520                 callbacks()->string_primitive_value_callback,
1521                 &wrapper,
1522                 obj,
1523                 (void*)user_data() );
1524     if (check_flags_for_abort(res)) return;
1525   }
1526 
1527   // array callback
1528   if (is_array &&
1529       callbacks()->array_primitive_value_callback != NULL &&
1530       obj->is_typeArray()) {
1531     jint res = invoke_array_primitive_value_callback(
1532                callbacks()->array_primitive_value_callback,
1533                &wrapper,
1534                obj,
1535                (void*)user_data() );
1536     if (check_flags_for_abort(res)) return;
1537   }
1538 }
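
// Illustrative sketch (agent-side, not part of this implementation): a
// minimal jvmtiHeapIterationCallback whose return value drives
// check_flags_for_abort() above. It tags each visited object via *tag_ptr and
// returns JVMTI_VISIT_ABORT once a limit is reached; the names count_and_tag
// and kMaxObjects are hypothetical.
//
//   static jint JNICALL count_and_tag(jlong class_tag, jlong size,
//                                     jlong* tag_ptr, jint length,
//                                     void* user_data) {
//     jint* seen = (jint*)user_data;
//     *tag_ptr = ++(*seen);                        // tag the visited object
//     return (*seen >= kMaxObjects) ? JVMTI_VISIT_ABORT : 0;
//   }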
1539 
1540 
1541 // Deprecated function to iterate over all objects in the heap
1542 void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
1543                                     KlassHandle klass,
1544                                     jvmtiHeapObjectCallback heap_object_callback,
1545                                     const void* user_data)
1546 {
1547   MutexLocker ml(Heap_lock);
1548   IterateOverHeapObjectClosure blk(this,
1549                                    klass,
1550                                    object_filter,
1551                                    heap_object_callback,
1552                                    user_data);
1553   VM_HeapIterateOperation op(&blk);
1554   VMThread::execute(&op);
1555 }
1556 
1557 
1558 // Iterates over all objects in the heap
1559 void JvmtiTagMap::iterate_through_heap(jint heap_filter,
1560                                        KlassHandle klass,
1561                                        const jvmtiHeapCallbacks* callbacks,
1562                                        const void* user_data)
1563 {
1564   MutexLocker ml(Heap_lock);
1565   IterateThroughHeapObjectClosure blk(this,
1566                                       klass,
1567                                       heap_filter,
1568                                       callbacks,
1569                                       user_data);
1570   VM_HeapIterateOperation op(&blk);
1571   VMThread::execute(&op);
1572 }
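
// Illustrative sketch (agent-side): how iterate_through_heap() above is
// typically reached through the JVM TI interface. 'jvmti' is assumed to be a
// jvmtiEnv* obtained via GetEnv; count_and_tag is the callback sketched
// earlier.
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_iteration_callback = &count_and_tag;
//
//   jint seen = 0;
//   jvmtiError err = jvmti->IterateThroughHeap(0 /* no heap filter */,
//                                              NULL /* all classes */,
//                                              &callbacks,
//                                              &seen);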
1573 
1574 // support class for get_objects_with_tags
1575 
1576 class TagObjectCollector : public JvmtiTagHashmapEntryClosure {
1577  private:
1578   JvmtiEnv* _env;
1579   jlong* _tags;
1580   jint _tag_count;
1581 
1582   GrowableArray<jobject>* _object_results;  // collected objects (JNI weak refs)
1583   GrowableArray<uint64_t>* _tag_results;    // collected tags
1584 
1585  public:
1586   TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) {
1587     _env = env;
1588     _tags = (jlong*)tags;
1589     _tag_count = tag_count;
1590     _object_results = new (ResourceObj::C_HEAP) GrowableArray<jobject>(1,true);
1591     _tag_results = new (ResourceObj::C_HEAP) GrowableArray<uint64_t>(1,true);
1592   }
1593 
1594   ~TagObjectCollector() {
1595     delete _object_results;
1596     delete _tag_results;
1597   }
1598 
1599   // For each tagged object check if the tag value matches one of the
1600   // given tags. If it matches then we create a JNI local reference to
1601   // the object and record the reference and tag value.
1602   //
1603   void do_entry(JvmtiTagHashmapEntry* entry) {
1604     for (int i=0; i<_tag_count; i++) {
1605       if (_tags[i] == entry->tag()) {
1606         oop o = JNIHandles::resolve(entry->object());
1607         assert(o != NULL && o != JNIHandles::deleted_handle(), "sanity check");
1608 
1609         // the mirror is tagged
1610         if (o->is_klass()) {
1611           klassOop k = (klassOop)o;
1612           o = Klass::cast(k)->java_mirror();
1613         }
1614 
1615         jobject ref = JNIHandles::make_local(JavaThread::current(), o);
1616         _object_results->append(ref);
1617         _tag_results->append((uint64_t)entry->tag());
1618       }
1619     }
1620   }
1621 
1622   // return the results from the collection
1623   //
1624   jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1625     jvmtiError error;
1626     int count = _object_results->length();
1627     assert(count >= 0, "sanity check");
1628 
1629     // if object_result_ptr is not NULL then allocate the result and copy
1630     // in the object references.
1631     if (object_result_ptr != NULL) {
1632       error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
1633       if (error != JVMTI_ERROR_NONE) {
1634         return error;
1635       }
1636       for (int i=0; i<count; i++) {
1637         (*object_result_ptr)[i] = _object_results->at(i);
1638       }
1639     }
1640 
1641     // if tag_result_ptr is not NULL then allocate the result and copy
1642     // in the tag values.
1643     if (tag_result_ptr != NULL) {
1644       error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
1645       if (error != JVMTI_ERROR_NONE) {
1646         if (object_result_ptr != NULL) {
1647           _env->Deallocate((unsigned char*)*object_result_ptr);
1648         }
1649         return error;
1650       }
1651       for (int i=0; i<count; i++) {
1652         (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
1653       }
1654     }
1655 
1656     *count_ptr = count;
1657     return JVMTI_ERROR_NONE;
1658   }
1659 };
1660 
1661 // return the list of objects with the specified tags
1662 jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
1663   jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1664 
1665   TagObjectCollector collector(env(), tags, count);
1666   {
1667     // iterate over all tagged objects
1668     MutexLocker ml(lock());
1669     entry_iterate(&collector);
1670   }
1671   return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
1672 }
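
// Illustrative sketch (agent-side): querying objects by tag and releasing the
// buffers allocated on the agent's behalf. 'jvmti' is assumed to be a
// jvmtiEnv*; the tag value 42 is arbitrary.
//
//   jlong wanted = 42;
//   jint count = 0;
//   jobject* objects = NULL;
//   jlong* found_tags = NULL;
//   if (jvmti->GetObjectsWithTags(1, &wanted, &count,
//                                 &objects, &found_tags) == JVMTI_ERROR_NONE) {
//     // ... use objects[0 .. count-1] ...
//     jvmti->Deallocate((unsigned char*)objects);
//     jvmti->Deallocate((unsigned char*)found_tags);
//   }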
1673 
1674 
1675 // ObjectMarker is used to support marking objects when walking the
1676 // heap.
1677 //
1678 // This implementation uses the existing mark bits in an object for
1679 // marking. Objects that are marked must later have their headers restored.
1680 // As most objects are unlocked and don't have their identity hash computed
1681 // we don't have to save their headers. Instead we save the headers that
1682 // are "interesting". Later when the headers are restored this implementation
1683 // restores all headers to their initial value and then restores the few
1684 // objects that had interesting headers.
1685 //
1686 // Future work: This implementation currently uses growable arrays to save
1687 // the oop and header of interesting objects. As an optimization we could
1688 // use the same technique as the GC and make use of the unused area
1689 // between top() and end().
1690 //
1691 
1692 // An ObjectClosure used to restore the mark bits of an object
1693 class RestoreMarksClosure : public ObjectClosure {
1694  public:
1695   void do_object(oop o) {
1696     if (o != NULL) {
1697       markOop mark = o->mark();
1698       if (mark->is_marked()) {
1699         o->init_mark();
1700       }
1701     }
1702   }
1703 };
1704 
1705 // ObjectMarker provides the mark and visited functions
1706 class ObjectMarker : AllStatic {
1707  private:
1708   // saved headers
1709   static GrowableArray<oop>* _saved_oop_stack;
1710   static GrowableArray<markOop>* _saved_mark_stack;
1711 
1712  public:
1713   static void init();                       // initialize
1714   static void done();                       // clean-up
1715 
1716   static inline void mark(oop o);           // mark an object
1717   static inline bool visited(oop o);        // check if object has been visited
1718 };
1719 
1720 GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
1721 GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
1722 
1723 // initialize ObjectMarker - prepares for object marking
1724 void ObjectMarker::init() {
1725   assert(Thread::current()->is_VM_thread(), "must be VMThread");
1726 
1727   // prepare heap for iteration
1728   Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
1729 
1730   // create stacks for interesting headers
1731   _saved_mark_stack = new (ResourceObj::C_HEAP) GrowableArray<markOop>(4000, true);
1732   _saved_oop_stack = new (ResourceObj::C_HEAP) GrowableArray<oop>(4000, true);
1733 
1734   if (UseBiasedLocking) {
1735     BiasedLocking::preserve_marks();
1736   }
1737 }
1738 
1739 // Object marking is done so restore object headers
1740 void ObjectMarker::done() {
1741   // iterate over all objects and restore the mark bits to
1742   // their initial value
1743   RestoreMarksClosure blk;
1744   Universe::heap()->object_iterate(&blk);
1745 
1746   // When sharing is enabled we need to restore the headers of the objects
1747   // in the readwrite space too.
1748   if (UseSharedSpaces) {
1749     GenCollectedHeap* gch = GenCollectedHeap::heap();
1750     CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
1751     gen->rw_space()->object_iterate(&blk);
1752   }
1753 
1754   // now restore the interesting headers
1755   for (int i = 0; i < _saved_oop_stack->length(); i++) {
1756     oop o = _saved_oop_stack->at(i);
1757     markOop mark = _saved_mark_stack->at(i);
1758     o->set_mark(mark);
1759   }
1760 
1761   if (UseBiasedLocking) {
1762     BiasedLocking::restore_marks();
1763   }
1764 
1765   // free the stacks
1766   delete _saved_oop_stack;
1767   delete _saved_mark_stack;
1768 }
1769 
1770 // mark an object
1771 inline void ObjectMarker::mark(oop o) {
1772   assert(Universe::heap()->is_in(o), "sanity check");
1773   assert(!o->mark()->is_marked(), "should only mark an object once");
1774 
1775   // object's mark word
1776   markOop mark = o->mark();
1777 
1778   if (mark->must_be_preserved(o)) {
1779     _saved_mark_stack->push(mark);
1780     _saved_oop_stack->push(o);
1781   }
1782 
1783   // mark the object
1784   o->set_mark(markOopDesc::prototype()->set_marked());
1785 }
1786 
1787 // return true if object is marked
1788 inline bool ObjectMarker::visited(oop o) {
1789   return o->mark()->is_marked();
1790 }
1791 
1792 // Stack allocated class to help ensure that ObjectMarker is used
1793 // correctly. Constructor initializes ObjectMarker, destructor calls
1794 // ObjectMarker's done() function to restore object headers.
1795 class ObjectMarkerController : public StackObj {
1796  public:
1797   ObjectMarkerController() {
1798     ObjectMarker::init();
1799   }
1800   ~ObjectMarkerController() {
1801     ObjectMarker::done();
1802   }
1803 };
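
// Illustrative sketch of the intended usage pattern: the controller is stack
// allocated so that headers disturbed by ObjectMarker::mark() are restored
// even if the walk returns early. The heap walk operation below uses it in
// essentially this form.
//
//   {
//     ObjectMarkerController marker;        // ObjectMarker::init()
//     ...
//     if (!ObjectMarker::visited(o)) {
//       ObjectMarker::mark(o);              // may save the original header
//       // visit o and push its references onto the visit stack
//     }
//     ...
//   }                                       // ObjectMarker::done()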
1804 
1805 
1806 // helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
1807 // (not performance critical as only used for roots)
1808 static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
1809   switch (kind) {
1810     case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
1811     case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
1812     case JVMTI_HEAP_REFERENCE_MONITOR:      return JVMTI_HEAP_ROOT_MONITOR;
1813     case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
1814     case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
1815     case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
1816     case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
1817     default: ShouldNotReachHere();          return JVMTI_HEAP_ROOT_OTHER;
1818   }
1819 }
1820 
1821 // Base class for all heap walk contexts. The base class maintains a flag
1822 // to indicate if the context is valid or not.
1823 class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
1824  private:
1825   bool _valid;
1826  public:
1827   HeapWalkContext(bool valid)                   { _valid = valid; }
1828   void invalidate()                             { _valid = false; }
1829   bool is_valid() const                         { return _valid; }
1830 };
1831 
1832 // A basic heap walk context for the deprecated heap walking functions.
1833 // The context for a basic heap walk are the callbacks and fields used by
1834 // the referrer caching scheme.
1835 class BasicHeapWalkContext: public HeapWalkContext {
1836  private:
1837   jvmtiHeapRootCallback _heap_root_callback;
1838   jvmtiStackReferenceCallback _stack_ref_callback;
1839   jvmtiObjectReferenceCallback _object_ref_callback;
1840 
1841   // used for caching
1842   oop _last_referrer;
1843   jlong _last_referrer_tag;
1844 
1845  public:
1846   BasicHeapWalkContext() : HeapWalkContext(false) { }
1847 
1848   BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
1849                        jvmtiStackReferenceCallback stack_ref_callback,
1850                        jvmtiObjectReferenceCallback object_ref_callback) :
1851     HeapWalkContext(true),
1852     _heap_root_callback(heap_root_callback),
1853     _stack_ref_callback(stack_ref_callback),
1854     _object_ref_callback(object_ref_callback),
1855     _last_referrer(NULL),
1856     _last_referrer_tag(0) {
1857   }
1858 
1859   // accessors
1860   jvmtiHeapRootCallback heap_root_callback() const         { return _heap_root_callback; }
1861   jvmtiStackReferenceCallback stack_ref_callback() const   { return _stack_ref_callback; }
1862   jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback;  }
1863 
1864   oop last_referrer() const               { return _last_referrer; }
1865   void set_last_referrer(oop referrer)    { _last_referrer = referrer; }
1866   jlong last_referrer_tag() const         { return _last_referrer_tag; }
1867   void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
1868 };
1869 
1870 // The advanced heap walk context for the FollowReferences functions.
1871 // The context is the callbacks, and the fields used for filtering.
1872 class AdvancedHeapWalkContext: public HeapWalkContext {
1873  private:
1874   jint _heap_filter;
1875   KlassHandle _klass_filter;
1876   const jvmtiHeapCallbacks* _heap_callbacks;
1877 
1878  public:
1879   AdvancedHeapWalkContext() : HeapWalkContext(false) { }
1880 
1881   AdvancedHeapWalkContext(jint heap_filter,
1882                            KlassHandle klass_filter,
1883                            const jvmtiHeapCallbacks* heap_callbacks) :
1884     HeapWalkContext(true),
1885     _heap_filter(heap_filter),
1886     _klass_filter(klass_filter),
1887     _heap_callbacks(heap_callbacks) {
1888   }
1889 
1890   // accessors
1891   jint heap_filter() const         { return _heap_filter; }
1892   KlassHandle klass_filter() const { return _klass_filter; }
1893 
1894   const jvmtiHeapReferenceCallback heap_reference_callback() const {
1895     return _heap_callbacks->heap_reference_callback;
1896   };
1897   const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
1898     return _heap_callbacks->primitive_field_callback;
1899   }
1900   const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
1901     return _heap_callbacks->array_primitive_value_callback;
1902   }
1903   const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
1904     return _heap_callbacks->string_primitive_value_callback;
1905   }
1906 };
1907 
1908 // The CallbackInvoker is a class with static functions that the heap walk can call
1909 // into to invoke callbacks. It works in one of two modes. The "basic" mode is
1910 // used for the deprecated IterateOverReachableObjects functions. The "advanced"
1911 // mode is for the newer FollowReferences function which supports a lot of
1912 // additional callbacks.
1913 class CallbackInvoker : AllStatic {
1914  private:
1915   // heap walk styles
1916   enum { basic, advanced };
1917   static int _heap_walk_type;
1918   static bool is_basic_heap_walk()           { return _heap_walk_type == basic; }
1919   static bool is_advanced_heap_walk()        { return _heap_walk_type == advanced; }
1920 
1921   // context for basic style heap walk
1922   static BasicHeapWalkContext _basic_context;
1923   static BasicHeapWalkContext* basic_context() {
1924     assert(_basic_context.is_valid(), "invalid");
1925     return &_basic_context;
1926   }
1927 
1928   // context for advanced style heap walk
1929   static AdvancedHeapWalkContext _advanced_context;
1930   static AdvancedHeapWalkContext* advanced_context() {
1931     assert(_advanced_context.is_valid(), "invalid");
1932     return &_advanced_context;
1933   }
1934 
1935   // context needed for all heap walks
1936   static JvmtiTagMap* _tag_map;
1937   static const void* _user_data;
1938   static GrowableArray<oop>* _visit_stack;
1939 
1940   // accessors
1941   static JvmtiTagMap* tag_map()                        { return _tag_map; }
1942   static const void* user_data()                       { return _user_data; }
1943   static GrowableArray<oop>* visit_stack()             { return _visit_stack; }
1944 
1945   // if the object hasn't been visited then push it onto the visit stack
1946   // so that it will be visited later
1947   static inline bool check_for_visit(oop obj) {
1948     if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
1949     return true;
1950   }
1951 
1952   // invoke basic style callbacks
1953   static inline bool invoke_basic_heap_root_callback
1954     (jvmtiHeapRootKind root_kind, oop obj);
1955   static inline bool invoke_basic_stack_ref_callback
1956     (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
1957      int slot, oop obj);
1958   static inline bool invoke_basic_object_reference_callback
1959     (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);
1960 
1961   // invoke advanced style callbacks
1962   static inline bool invoke_advanced_heap_root_callback
1963     (jvmtiHeapReferenceKind ref_kind, oop obj);
1964   static inline bool invoke_advanced_stack_ref_callback
1965     (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
1966      jmethodID method, jlocation bci, jint slot, oop obj);
1967   static inline bool invoke_advanced_object_reference_callback
1968     (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);
1969 
1970   // used to report the value of primitive fields
1971   static inline bool report_primitive_field
1972     (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);
1973 
1974  public:
1975   // initialize for basic mode
1976   static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1977                                              GrowableArray<oop>* visit_stack,
1978                                              const void* user_data,
1979                                              BasicHeapWalkContext context);
1980 
1981   // initialize for advanced mode
1982   static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1983                                                 GrowableArray<oop>* visit_stack,
1984                                                 const void* user_data,
1985                                                 AdvancedHeapWalkContext context);
1986 
1987    // functions to report roots
1988   static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
1989   static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
1990     jmethodID m, oop o);
1991   static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
1992     jmethodID method, jlocation bci, jint slot, oop o);
1993 
1994   // functions to report references
1995   static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
1996   static inline bool report_class_reference(oop referrer, oop referree);
1997   static inline bool report_class_loader_reference(oop referrer, oop referree);
1998   static inline bool report_signers_reference(oop referrer, oop referree);
1999   static inline bool report_protection_domain_reference(oop referrer, oop referree);
2000   static inline bool report_superclass_reference(oop referrer, oop referree);
2001   static inline bool report_interface_reference(oop referrer, oop referree);
2002   static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
2003   static inline bool report_field_reference(oop referrer, oop referree, jint slot);
2004   static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
2005   static inline bool report_primitive_array_values(oop array);
2006   static inline bool report_string_value(oop str);
2007   static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
2008   static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
2009 };
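
// Illustrative sketch of how a heap walk drives the CallbackInvoker (the
// VM_HeapWalkOperation constructors further below do this): one mode is
// selected up front and the report_* functions then dispatch to the matching
// callbacks.
//
//   AdvancedHeapWalkContext context(heap_filter, klass_filter, callbacks);
//   CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, visit_stack,
//                                                      user_data, context);
//   ...
//   CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_JNI_GLOBAL, obj);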
2010 
2011 // statics
2012 int CallbackInvoker::_heap_walk_type;
2013 BasicHeapWalkContext CallbackInvoker::_basic_context;
2014 AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
2015 JvmtiTagMap* CallbackInvoker::_tag_map;
2016 const void* CallbackInvoker::_user_data;
2017 GrowableArray<oop>* CallbackInvoker::_visit_stack;
2018 
2019 // initialize for basic heap walk (IterateOverReachableObjects et al)
2020 void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
2021                                                      GrowableArray<oop>* visit_stack,
2022                                                      const void* user_data,
2023                                                      BasicHeapWalkContext context) {
2024   _tag_map = tag_map;
2025   _visit_stack = visit_stack;
2026   _user_data = user_data;
2027   _basic_context = context;
2028   _advanced_context.invalidate();       // will trigger assertion if used
2029   _heap_walk_type = basic;
2030 }
2031 
2032 // initialize for advanced heap walk (FollowReferences)
2033 void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
2034                                                         GrowableArray<oop>* visit_stack,
2035                                                         const void* user_data,
2036                                                         AdvancedHeapWalkContext context) {
2037   _tag_map = tag_map;
2038   _visit_stack = visit_stack;
2039   _user_data = user_data;
2040   _advanced_context = context;
2041   _basic_context.invalidate();      // will trigger assertion if used
2042   _heap_walk_type = advanced;
2043 }
2044 
2045 
2046 // invoke basic style heap root callback
2047 inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
2048   assert(ServiceUtil::visible_oop(obj), "checking");
2049 
2050   // check if heap roots should be reported
2051   jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
2052   if (cb == NULL) {
2053     return check_for_visit(obj);
2054   }
2055 
2056   CallbackWrapper wrapper(tag_map(), obj);
2057   jvmtiIterationControl control = (*cb)(root_kind,
2058                                         wrapper.klass_tag(),
2059                                         wrapper.obj_size(),
2060                                         wrapper.obj_tag_p(),
2061                                         (void*)user_data());
2062   // push root to visit stack when following references
2063   if (control == JVMTI_ITERATION_CONTINUE &&
2064       basic_context()->object_ref_callback() != NULL) {
2065     visit_stack()->push(obj);
2066   }
2067   return control != JVMTI_ITERATION_ABORT;
2068 }
2069 
2070 // invoke basic style stack ref callback
2071 inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
2072                                                              jlong thread_tag,
2073                                                              jint depth,
2074                                                              jmethodID method,
2075                                                              jint slot,
2076                                                              oop obj) {
2077   assert(ServiceUtil::visible_oop(obj), "checking");
2078 
2079   // check if stack refs should be reported
2080   jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
2081   if (cb == NULL) {
2082     return check_for_visit(obj);
2083   }
2084 
2085   CallbackWrapper wrapper(tag_map(), obj);
2086   jvmtiIterationControl control = (*cb)(root_kind,
2087                                         wrapper.klass_tag(),
2088                                         wrapper.obj_size(),
2089                                         wrapper.obj_tag_p(),
2090                                         thread_tag,
2091                                         depth,
2092                                         method,
2093                                         slot,
2094                                         (void*)user_data());
2095   // push root to visit stack when following references
2096   if (control == JVMTI_ITERATION_CONTINUE &&
2097       basic_context()->object_ref_callback() != NULL) {
2098     visit_stack()->push(obj);
2099   }
2100   return control != JVMTI_ITERATION_ABORT;
2101 }
2102 
2103 // invoke basic style object reference callback
2104 inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
2105                                                                     oop referrer,
2106                                                                     oop referree,
2107                                                                     jint index) {
2108 
2109   assert(ServiceUtil::visible_oop(referrer), "checking");
2110   assert(ServiceUtil::visible_oop(referree), "checking");
2111 
2112   BasicHeapWalkContext* context = basic_context();
2113 
2114   // callback requires the referrer's tag. If it's the same referrer
2115   // as the last call then we use the cached value.
2116   jlong referrer_tag;
2117   if (referrer == context->last_referrer()) {
2118     referrer_tag = context->last_referrer_tag();
2119   } else {
2120     referrer_tag = tag_for(tag_map(), klassOop_if_java_lang_Class(referrer));
2121   }
2122 
2123   // do the callback
2124   CallbackWrapper wrapper(tag_map(), referree);
2125   jvmtiObjectReferenceCallback cb = context->object_ref_callback();
2126   jvmtiIterationControl control = (*cb)(ref_kind,
2127                                         wrapper.klass_tag(),
2128                                         wrapper.obj_size(),
2129                                         wrapper.obj_tag_p(),
2130                                         referrer_tag,
2131                                         index,
2132                                         (void*)user_data());
2133 
2134   // record referrer and referrer tag. For self-references record the
2135   // tag value from the callback as this might differ from referrer_tag.
2136   context->set_last_referrer(referrer);
2137   if (referrer == referree) {
2138     context->set_last_referrer_tag(*wrapper.obj_tag_p());
2139   } else {
2140     context->set_last_referrer_tag(referrer_tag);
2141   }
2142 
2143   if (control == JVMTI_ITERATION_CONTINUE) {
2144     return check_for_visit(referree);
2145   } else {
2146     return control != JVMTI_ITERATION_ABORT;
2147   }
2148 }
2149 
2150 // invoke advanced style heap root callback
2151 inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
2152                                                                 oop obj) {
2153   assert(ServiceUtil::visible_oop(obj), "checking");
2154 
2155   AdvancedHeapWalkContext* context = advanced_context();
2156 
2157   // check that callback is provided
2158   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2159   if (cb == NULL) {
2160     return check_for_visit(obj);
2161   }
2162 
2163   // apply class filter
2164   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2165     return check_for_visit(obj);
2166   }
2167 
2168   // setup the callback wrapper
2169   CallbackWrapper wrapper(tag_map(), obj);
2170 
2171   // apply tag filter
2172   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2173                                  wrapper.klass_tag(),
2174                                  context->heap_filter())) {
2175     return check_for_visit(obj);
2176   }
2177 
2178   // for arrays we need the length, otherwise -1
2179   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2180 
2181   // invoke the callback
2182   jint res  = (*cb)(ref_kind,
2183                     NULL, // referrer info
2184                     wrapper.klass_tag(),
2185                     0,    // referrer_class_tag is 0 for heap root
2186                     wrapper.obj_size(),
2187                     wrapper.obj_tag_p(),
2188                     NULL, // referrer_tag_p
2189                     len,
2190                     (void*)user_data());
2191   if (res & JVMTI_VISIT_ABORT) {
2192     return false;
2193   }
2194   if (res & JVMTI_VISIT_OBJECTS) {
2195     check_for_visit(obj);
2196   }
2197   return true;
2198 }
2199 
2200 // report a reference from a thread stack to an object
2201 inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
2202                                                                 jlong thread_tag,
2203                                                                 jlong tid,
2204                                                                 int depth,
2205                                                                 jmethodID method,
2206                                                                 jlocation bci,
2207                                                                 jint slot,
2208                                                                 oop obj) {
2209   assert(ServiceUtil::visible_oop(obj), "checking");
2210 
2211   AdvancedHeapWalkContext* context = advanced_context();
2212 
2213   // check that a callback is provided
2214   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2215   if (cb == NULL) {
2216     return check_for_visit(obj);
2217   }
2218 
2219   // apply class filter
2220   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2221     return check_for_visit(obj);
2222   }
2223 
2224   // setup the callback wrapper
2225   CallbackWrapper wrapper(tag_map(), obj);
2226 
2227   // apply tag filter
2228   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2229                                  wrapper.klass_tag(),
2230                                  context->heap_filter())) {
2231     return check_for_visit(obj);
2232   }
2233 
2234   // setup the referrer info
2235   jvmtiHeapReferenceInfo reference_info;
2236   reference_info.stack_local.thread_tag = thread_tag;
2237   reference_info.stack_local.thread_id = tid;
2238   reference_info.stack_local.depth = depth;
2239   reference_info.stack_local.method = method;
2240   reference_info.stack_local.location = bci;
2241   reference_info.stack_local.slot = slot;
2242 
2243   // for arrays we need the length, otherwise -1
2244   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2245 
2246   // call into the agent
2247   int res = (*cb)(ref_kind,
2248                   &reference_info,
2249                   wrapper.klass_tag(),
2250                   0,    // referrer_class_tag is 0 for heap root (stack)
2251                   wrapper.obj_size(),
2252                   wrapper.obj_tag_p(),
2253                   NULL, // referrer_tag_p is NULL for roots
2254                   len,
2255                   (void*)user_data());
2256 
2257   if (res & JVMTI_VISIT_ABORT) {
2258     return false;
2259   }
2260   if (res & JVMTI_VISIT_OBJECTS) {
2261     check_for_visit(obj);
2262   }
2263   return true;
2264 }
2265 
2266 // This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
2267 // only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
2268 #define REF_INFO_MASK  ((1 << JVMTI_HEAP_REFERENCE_FIELD)         \
2269                       | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)  \
2270                       | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
2271                       | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
2272                       | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)   \
2273                       | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
2274 
2275 // invoke the object reference callback to report a reference
2276 inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
2277                                                                        oop referrer,
2278                                                                        oop obj,
2279                                                                        jint index)
2280 {
2281   // field index is only valid field in reference_info
2282   static jvmtiHeapReferenceInfo reference_info = { 0 };
2283 
2284   assert(ServiceUtil::visible_oop(referrer), "checking");
2285   assert(ServiceUtil::visible_oop(obj), "checking");
2286 
2287   AdvancedHeapWalkContext* context = advanced_context();
2288 
2289   // check that a callback is provided
2290   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
2291   if (cb == NULL) {
2292     return check_for_visit(obj);
2293   }
2294 
2295   // apply class filter
2296   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2297     return check_for_visit(obj);
2298   }
2299 
2300   // setup the callback wrapper
2301   TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);
2302 
2303   // apply tag filter
2304   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2305                                  wrapper.klass_tag(),
2306                                  context->heap_filter())) {
2307     return check_for_visit(obj);
2308   }
2309 
2310   // field index is only valid field in reference_info
2311   reference_info.field.index = index;
2312 
2313   // for arrays we need the length, otherwise -1
2314   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
2315 
2316   // invoke the callback
2317   int res = (*cb)(ref_kind,
2318                   (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
2319                   wrapper.klass_tag(),
2320                   wrapper.referrer_klass_tag(),
2321                   wrapper.obj_size(),
2322                   wrapper.obj_tag_p(),
2323                   wrapper.referrer_tag_p(),
2324                   len,
2325                   (void*)user_data());
2326 
2327   if (res & JVMTI_VISIT_ABORT) {
2328     return false;
2329   }
2330   if (res & JVMTI_VISIT_OBJECTS) {
2331     check_for_visit(obj);
2332   }
2333   return true;
2334 }
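
// Illustrative sketch (agent-side): a jvmtiHeapReferenceCallback whose return
// flags are interpreted above - JVMTI_VISIT_OBJECTS asks the VM to follow the
// references of the reported object, JVMTI_VISIT_ABORT terminates the walk.
// The tag value 99 is arbitrary.
//
//   static jint JNICALL follow_refs_cb(jvmtiHeapReferenceKind kind,
//                                      const jvmtiHeapReferenceInfo* info,
//                                      jlong class_tag, jlong referrer_class_tag,
//                                      jlong size, jlong* tag_ptr,
//                                      jlong* referrer_tag_ptr, jint length,
//                                      void* user_data) {
//     if (*tag_ptr == 0) {
//       *tag_ptr = 99;                      // first visit: tag the object
//       return JVMTI_VISIT_OBJECTS;         // and follow its references
//     }
//     return 0;                             // already seen: prune here
//   }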
2335 
2336 // report a "simple root"
2337 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
2338   assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
2339          kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
2340   assert(ServiceUtil::visible_oop(obj), "checking");
2341 
2342   if (is_basic_heap_walk()) {
2343     // map to old style root kind
2344     jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
2345     return invoke_basic_heap_root_callback(root_kind, obj);
2346   } else {
2347     assert(is_advanced_heap_walk(), "wrong heap walk type");
2348     return invoke_advanced_heap_root_callback(kind, obj);
2349   }
2350 }
2351 
2352 
2353 // report the values of a primitive array
2354 inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
2355   assert(obj->is_typeArray(), "not a primitive array");
2356 
2357   AdvancedHeapWalkContext* context = advanced_context();
2358   assert(context->array_primitive_value_callback() != NULL, "no callback");
2359 
2360   // apply class filter
2361   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2362     return true;
2363   }
2364 
2365   CallbackWrapper wrapper(tag_map(), obj);
2366 
2367   // apply tag filter
2368   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2369                                  wrapper.klass_tag(),
2370                                  context->heap_filter())) {
2371     return true;
2372   }
2373 
2374   // invoke the callback
2375   int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
2376                                                   &wrapper,
2377                                                   obj,
2378                                                   (void*)user_data());
2379   return (!(res & JVMTI_VISIT_ABORT));
2380 }
2381 
2382 // invoke the string value callback
2383 inline bool CallbackInvoker::report_string_value(oop str) {
2384   assert(str->klass() == SystemDictionary::String_klass(), "not a string");
2385 
2386   AdvancedHeapWalkContext* context = advanced_context();
2387   assert(context->string_primitive_value_callback() != NULL, "no callback");
2388 
2389   // apply class filter
2390   if (is_filtered_by_klass_filter(str, context->klass_filter())) {
2391     return true;
2392   }
2393 
2394   CallbackWrapper wrapper(tag_map(), str);
2395 
2396   // apply tag filter
2397   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2398                                  wrapper.klass_tag(),
2399                                  context->heap_filter())) {
2400     return true;
2401   }
2402 
2403   // invoke the callback
2404   int res = invoke_string_value_callback(context->string_primitive_value_callback(),
2405                                          &wrapper,
2406                                          str,
2407                                          (void*)user_data());
2408   return (!(res & JVMTI_VISIT_ABORT));
2409 }
2410 
2411 // invoke the primitive field callback
2412 inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
2413                                                     oop obj,
2414                                                     jint index,
2415                                                     address addr,
2416                                                     char type)
2417 {
2418   // for primitive fields only the index will be set
2419   static jvmtiHeapReferenceInfo reference_info = { 0 };
2420 
2421   AdvancedHeapWalkContext* context = advanced_context();
2422   assert(context->primitive_field_callback() != NULL, "no callback");
2423 
2424   // apply class filter
2425   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2426     return true;
2427   }
2428 
2429   CallbackWrapper wrapper(tag_map(), obj);
2430 
2431   // apply tag filter
2432   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2433                                  wrapper.klass_tag(),
2434                                  context->heap_filter())) {
2435     return true;
2436   }
2437 
2438   // the field index in the referrer
2439   reference_info.field.index = index;
2440 
2441   // map the type
2442   jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
2443 
2444   // setup the jvalue
2445   jvalue value;
2446   copy_to_jvalue(&value, addr, value_type);
2447 
2448   jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
2449   int res = (*cb)(ref_kind,
2450                   &reference_info,
2451                   wrapper.klass_tag(),
2452                   wrapper.obj_tag_p(),
2453                   value,
2454                   value_type,
2455                   (void*)user_data());
2456   return (!(res & JVMTI_VISIT_ABORT));
2457 }
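
// Illustrative sketch (agent-side): a jvmtiPrimitiveFieldCallback matching
// the invocation above. The field value arrives as a jvalue together with
// its jvmtiPrimitiveType; returning JVMTI_VISIT_ABORT would stop the walk.
//
//   static jint JNICALL prim_field_cb(jvmtiHeapReferenceKind kind,
//                                     const jvmtiHeapReferenceInfo* info,
//                                     jlong class_tag, jlong* tag_ptr,
//                                     jvalue value, jvmtiPrimitiveType value_type,
//                                     void* user_data) {
//     if (value_type == JVMTI_PRIMITIVE_TYPE_LONG && value.j < 0) {
//       return JVMTI_VISIT_ABORT;           // example: stop on a negative long
//     }
//     return 0;
//   }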
2458 
2459 
2460 // instance field
2461 inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
2462                                                              jint index,
2463                                                              address value,
2464                                                              char type) {
2465   return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
2466                                 obj,
2467                                 index,
2468                                 value,
2469                                 type);
2470 }
2471 
2472 // static field
2473 inline bool CallbackInvoker::report_primitive_static_field(oop obj,
2474                                                            jint index,
2475                                                            address value,
2476                                                            char type) {
2477   return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
2478                                 obj,
2479                                 index,
2480                                 value,
2481                                 type);
2482 }
2483 
2484 // report a JNI local (root object) to the profiler
2485 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
2486   if (is_basic_heap_walk()) {
2487     return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
2488                                            thread_tag,
2489                                            depth,
2490                                            m,
2491                                            -1,
2492                                            obj);
2493   } else {
2494     return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
2495                                               thread_tag, tid,
2496                                               depth,
2497                                               m,
2498                                               (jlocation)-1,
2499                                               -1,
2500                                               obj);
2501   }
2502 }
2503 
2504 
2505 // report a local (stack reference, root object)
2506 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
2507                                                    jlong tid,
2508                                                    jint depth,
2509                                                    jmethodID method,
2510                                                    jlocation bci,
2511                                                    jint slot,
2512                                                    oop obj) {
2513   if (is_basic_heap_walk()) {
2514     return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
2515                                            thread_tag,
2516                                            depth,
2517                                            method,
2518                                            slot,
2519                                            obj);
2520   } else {
2521     return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
2522                                               thread_tag,
2523                                               tid,
2524                                               depth,
2525                                               method,
2526                                               bci,
2527                                               slot,
2528                                               obj);
2529   }
2530 }
2531 
2532 // report an object referencing a class.
2533 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
2534   if (is_basic_heap_walk()) {
2535     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2536   } else {
2537     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
2538   }
2539 }
2540 
2541 // report a class referencing its class loader.
2542 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
2543   if (is_basic_heap_walk()) {
2544     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2545   } else {
2546     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2547   }
2548 }
2549 
2550 // report a class referencing its signers.
2551 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
2552   if (is_basic_heap_walk()) {
2553     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
2554   } else {
2555     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
2556   }
2557 }
2558 
2559 // report a class referencing its protection domain.
2560 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
2561   if (is_basic_heap_walk()) {
2562     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2563   } else {
2564     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2565   }
2566 }
2567 
2568 // report a class referencing its superclass.
2569 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
2570   if (is_basic_heap_walk()) {
2571     // Send this to be consistent with past implementation
2572     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2573   } else {
2574     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
2575   }
2576 }
2577 
2578 // report a class referencing one of its interfaces.
2579 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
2580   if (is_basic_heap_walk()) {
2581     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
2582   } else {
2583     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
2584   }
2585 }
2586 
2587 // report a class referencing one of its static fields.
2588 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
2589   if (is_basic_heap_walk()) {
2590     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2591   } else {
2592     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2593   }
2594 }
2595 
2596 // report an array referencing an element object
2597 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
2598   if (is_basic_heap_walk()) {
2599     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2600   } else {
2601     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2602   }
2603 }
2604 
2605 // report an object referencing an instance field object
2606 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
2607   if (is_basic_heap_walk()) {
2608     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
2609   } else {
2610     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
2611   }
2612 }
2613 
2614 // report a class referencing one of its constant pool entries
2615 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
2616   if (is_basic_heap_walk()) {
2617     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2618   } else {
2619     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2620   }
2621 }
2622 
2623 // A supporting closure used to process simple roots
2624 class SimpleRootsClosure : public OopClosure {
2625  private:
2626   jvmtiHeapReferenceKind _kind;
2627   bool _continue;
2628 
2629   jvmtiHeapReferenceKind root_kind()    { return _kind; }
2630 
2631  public:
2632   void set_kind(jvmtiHeapReferenceKind kind) {
2633     _kind = kind;
2634     _continue = true;
2635   }
2636 
2637   inline bool stopped() {
2638     return !_continue;
2639   }
2640 
2641   void do_oop(oop* obj_p) {
2642     // iteration has terminated
2643     if (stopped()) {
2644       return;
2645     }
2646 
2647     // ignore null or deleted handles
2648     oop o = *obj_p;
2649     if (o == NULL || o == JNIHandles::deleted_handle()) {
2650       return;
2651     }
2652 
2653     jvmtiHeapReferenceKind kind = root_kind();
2654 
2655     // many roots are Klasses so we use the java mirror
2656     if (o->is_klass()) {
2657       klassOop k = (klassOop)o;
2658       o = Klass::cast(k)->java_mirror();
2659     } else {
2660 
2661       // SystemDictionary::always_strong_oops_do reports the application
2662       // class loader as a root. We want this root to be reported as
2663       // a root kind of "OTHER" rather than "SYSTEM_CLASS".
2664       if (o->is_instance() && root_kind() == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
2665         kind = JVMTI_HEAP_REFERENCE_OTHER;
2666       }
2667     }
2668 
2669     // some objects are ignored - in the case of simple
2670     // roots it's mostly symbolOops that we are skipping
2671     // here.
2672     if (!ServiceUtil::visible_oop(o)) {
2673       return;
2674     }
2675 
2676     // invoke the callback
2677     _continue = CallbackInvoker::report_simple_root(kind, o);
2678 
2679   }
2680   virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2681 };
2682 
2683 // A supporting closure used to process JNI locals
2684 class JNILocalRootsClosure : public OopClosure {
2685  private:
2686   jlong _thread_tag;
2687   jlong _tid;
2688   jint _depth;
2689   jmethodID _method;
2690   bool _continue;
2691  public:
2692   void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
2693     _thread_tag = thread_tag;
2694     _tid = tid;
2695     _depth = depth;
2696     _method = method;
2697     _continue = true;
2698   }
2699 
2700   inline bool stopped() {
2701     return !_continue;
2702   }
2703 
2704   void do_oop(oop* obj_p) {
2705     // iteration has terminated
2706     if (stopped()) {
2707       return;
2708     }
2709 
2710     // ignore null or deleted handles
2711     oop o = *obj_p;
2712     if (o == NULL || o == JNIHandles::deleted_handle()) {
2713       return;
2714     }
2715 
2716     if (!ServiceUtil::visible_oop(o)) {
2717       return;
2718     }
2719 
2720     // invoke the callback
2721     _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
2722   }
2723   virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2724 };
2725 
2726 
2727 // A VM operation to iterate over objects that are reachable from
2728 // a set of roots or an initial object.
2729 //
2730 // For VM_HeapWalkOperation the set of roots used is :-
2731 //
2732 // - All JNI global references
2733 // - All inflated monitors
2734 // - All classes loaded by the boot class loader (or all classes
2735 //     in the event that class unloading is disabled)
2736 // - All java threads
2737 // - For each java thread then all locals and JNI local references
2738 //      on the thread's execution stack
2739 // - All visible/explainable objects from Universe::oops_do
2740 //
2741 class VM_HeapWalkOperation: public VM_Operation {
2742  private:
2743   enum {
2744     initial_visit_stack_size = 4000
2745   };
2746 
2747   bool _is_advanced_heap_walk;                      // indicates FollowReferences
2748   JvmtiTagMap* _tag_map;
2749   Handle _initial_object;
2750   GrowableArray<oop>* _visit_stack;                 // the visit stack
2751 
2752   bool _collecting_heap_roots;                      // are we collecting roots
2753   bool _following_object_refs;                      // are we following object references
2754 
2755   bool _reporting_primitive_fields;                 // optional reporting
2756   bool _reporting_primitive_array_values;
2757   bool _reporting_string_values;
2758 
2759   GrowableArray<oop>* create_visit_stack() {
2760     return new (ResourceObj::C_HEAP) GrowableArray<oop>(initial_visit_stack_size, true);
2761   }
2762 
2763   // accessors
2764   bool is_advanced_heap_walk() const               { return _is_advanced_heap_walk; }
2765   JvmtiTagMap* tag_map() const                     { return _tag_map; }
2766   Handle initial_object() const                    { return _initial_object; }
2767 
2768   bool is_following_references() const             { return _following_object_refs; }
2769 
2770   bool is_reporting_primitive_fields()  const      { return _reporting_primitive_fields; }
2771   bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
2772   bool is_reporting_string_values() const          { return _reporting_string_values; }
2773 
2774   GrowableArray<oop>* visit_stack() const          { return _visit_stack; }
2775 
2776   // iterate over the various object types
2777   inline bool iterate_over_array(oop o);
2778   inline bool iterate_over_type_array(oop o);
2779   inline bool iterate_over_class(klassOop o);
2780   inline bool iterate_over_object(oop o);
2781 
2782   // root collection
2783   inline bool collect_simple_roots();
2784   inline bool collect_stack_roots();
2785   inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);
2786 
2787   // visit an object
2788   inline bool visit(oop o);
2789 
2790  public:
2791   VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2792                        Handle initial_object,
2793                        BasicHeapWalkContext callbacks,
2794                        const void* user_data);
2795 
2796   VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2797                        Handle initial_object,
2798                        AdvancedHeapWalkContext callbacks,
2799                        const void* user_data);
2800 
2801   ~VM_HeapWalkOperation();
2802 
2803   VMOp_Type type() const { return VMOp_HeapWalkOperation; }
2804   void doit();
2805 };
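
// Illustrative sketch (agent-side): the advanced form of this operation is
// reached through FollowReferences. Passing NULL as the initial object makes
// the walk start from the root set described above; follow_refs_cb is the
// callback sketched earlier and 'jvmti' is an assumed jvmtiEnv*.
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_reference_callback = &follow_refs_cb;
//
//   jvmtiError err = jvmti->FollowReferences(0 /* no heap filter */,
//                                            NULL /* all classes */,
//                                            NULL /* start from the roots */,
//                                            &callbacks,
//                                            NULL /* user_data */);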
2806 
2807 
2808 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2809                                            Handle initial_object,
2810                                            BasicHeapWalkContext callbacks,
2811                                            const void* user_data) {
2812   _is_advanced_heap_walk = false;
2813   _tag_map = tag_map;
2814   _initial_object = initial_object;
2815   _following_object_refs = (callbacks.object_ref_callback() != NULL);
2816   _reporting_primitive_fields = false;
2817   _reporting_primitive_array_values = false;
2818   _reporting_string_values = false;
2819   _visit_stack = create_visit_stack();
2820 
2821 
2822   CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2823 }
2824 
2825 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2826                                            Handle initial_object,
2827                                            AdvancedHeapWalkContext callbacks,
2828                                            const void* user_data) {
2829   _is_advanced_heap_walk = true;
2830   _tag_map = tag_map;
2831   _initial_object = initial_object;
2832   _following_object_refs = true;
2833   _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);
2834   _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);
2835   _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);
2836   _visit_stack = create_visit_stack();
2837 
2838   CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2839 }
2840 
2841 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
2842   if (_following_object_refs) {
2843     assert(_visit_stack != NULL, "checking");
2844     delete _visit_stack;
2845     _visit_stack = NULL;
2846   }
2847 }
2848 
2849 // an array references its class and has a reference to
2850 // each element in the array
2851 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
2852   objArrayOop array = objArrayOop(o);
2853   if (array->klass() == Universe::systemObjArrayKlassObj()) {
2854     // filtered out
2855     return true;
2856   }
2857 
2858   // array reference to its class
2859   oop mirror = objArrayKlass::cast(array->klass())->java_mirror();
2860   if (!CallbackInvoker::report_class_reference(o, mirror)) {
2861     return false;
2862   }
2863 
2864   // iterate over the array and report each reference to a
2865   // non-null element
2866   for (int index=0; index<array->length(); index++) {
2867     oop elem = array->obj_at(index);
2868     if (elem == NULL) {
2869       continue;
2870     }
2871 
2872     // report the array reference o[index] = elem
2873     if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
2874       return false;
2875     }
2876   }
2877   return true;
2878 }
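
// Agent-side view (illustrative sketch only - the callback and variable
// names below are not part of this file): during a basic heap walk each
// report_array_element_reference() call above surfaces through the agent's
// jvmtiObjectReferenceCallback as JVMTI_REFERENCE_ARRAY_ELEMENT (or as
// JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT in the advanced walk), with
// referrer_index carrying the element index:
//
//   static jvmtiIterationControl JNICALL
//   count_array_refs(jvmtiObjectReferenceKind kind, jlong class_tag,
//                    jlong size, jlong* tag_ptr, jlong referrer_tag,
//                    jint referrer_index, void* user_data) {
//     if (kind == JVMTI_REFERENCE_ARRAY_ELEMENT) {
//       (*(jlong*)user_data)++;        // one count per o[index] reported above
//     }
//     return JVMTI_ITERATION_CONTINUE; // keep iterating
//   }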
2879 
2880 // a type array references its class
2881 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
2882   klassOop k = o->klass();
2883   oop mirror = Klass::cast(k)->java_mirror();
2884   if (!CallbackInvoker::report_class_reference(o, mirror)) {
2885     return false;
2886   }
2887 
2888   // report the array contents if required
2889   if (is_reporting_primitive_array_values()) {
2890     if (!CallbackInvoker::report_primitive_array_values(o)) {
2891       return false;
2892     }
2893   }
2894   return true;
2895 }
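
// Agent-side view (hedged sketch; the callback name is illustrative): when
// the agent supplies an array_primitive_value_callback in jvmtiHeapCallbacks,
// report_primitive_array_values() above delivers the raw elements:
//
//   static jint JNICALL
//   prim_array_cb(jlong class_tag, jlong size, jlong* tag_ptr,
//                 jint element_count, jvmtiPrimitiveType element_type,
//                 const void* elements, void* user_data) {
//     // e.g. element_type == JVMTI_PRIMITIVE_TYPE_INT for an int[] object
//     return 0;                        // 0 = continue (JVMTI_VISIT_ABORT not set)
//   }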
2896 
2897 // verify that a static oop field is in range
2898 static inline bool verify_static_oop(instanceKlass* ik,
2899                                      klassOop k, int offset) {
2900   address obj_p = (address)k + offset;
2901   address start = (address)ik->start_of_static_fields();
2902   address end = start + (ik->static_oop_field_size() * heapOopSize);
2903   assert(end >= start, "sanity check");
2904 
2905   if (obj_p >= start && obj_p < end) {
2906     return true;
2907   } else {
2908     return false;
2909   }
2910 }
2911 
2912 // a class references its super class, interfaces, class loader, ...
2913 // and finally its static fields
2914 inline bool VM_HeapWalkOperation::iterate_over_class(klassOop k) {
2915   int i;
2916   Klass* klass = klassOop(k)->klass_part();
2917 
2918   if (klass->oop_is_instance()) {
2919     instanceKlass* ik = instanceKlass::cast(k);
2920 
2921     // ignore the class if it hasn't been linked yet
2922     if (!ik->is_linked()) {
2923       return true;
2924     }
2925 
2926     // get the java mirror
2927     oop mirror = klass->java_mirror();
2928 
2929     // super (only if something more interesting than java.lang.Object)
2930     klassOop java_super = ik->java_super();
2931     if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
2932       oop super = Klass::cast(java_super)->java_mirror();
2933       if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
2934         return false;
2935       }
2936     }
2937 
2938     // class loader
2939     oop cl = ik->class_loader();
2940     if (cl != NULL) {
2941       if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
2942         return false;
2943       }
2944     }
2945 
2946     // protection domain
2947     oop pd = ik->protection_domain();
2948     if (pd != NULL) {
2949       if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
2950         return false;
2951       }
2952     }
2953 
2954     // signers
2955     oop signers = ik->signers();
2956     if (signers != NULL) {
2957       if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
2958         return false;
2959       }
2960     }
2961 
2962     // references from the constant pool
2963     {
2964       const constantPoolOop pool = ik->constants();
2965       for (int i = 1; i < pool->length(); i++) {
2966         constantTag tag = pool->tag_at(i).value();
2967         if (tag.is_string() || tag.is_klass()) {
2968           oop entry;
2969           if (tag.is_string()) {
2970             entry = pool->resolved_string_at(i);
2971             assert(java_lang_String::is_instance(entry), "must be string");
2972           } else {
2973             entry = Klass::cast(pool->resolved_klass_at(i))->java_mirror();
2974           }
2975           if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
2976             return false;
2977           }
2978         }
2979       }
2980     }
2981 
2982     // interfaces
2983     // (These will already have been reported as references from the constant pool
2984     //  but are specified by IterateOverReachableObjects and must be reported).
2985     objArrayOop interfaces = ik->local_interfaces();
2986     for (i = 0; i < interfaces->length(); i++) {
2987       oop interf = Klass::cast((klassOop)interfaces->obj_at(i))->java_mirror();
2988       if (interf == NULL) {
2989         continue;
2990       }
2991       if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
2992         return false;
2993       }
2994     }
2995 
2996     // iterate over the static fields
2997 
2998     ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(k);
2999     for (i=0; i<field_map->field_count(); i++) {
3000       ClassFieldDescriptor* field = field_map->field_at(i);
3001       char type = field->field_type();
3002       if (!is_primitive_field_type(type)) {
3003         oop fld_o = k->obj_field(field->field_offset());
3004         assert(verify_static_oop(ik, k, field->field_offset()), "sanity check");
3005         if (fld_o != NULL) {
3006           int slot = field->field_index();
3007           if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
3008             delete field_map;
3009             return false;
3010           }
3011         }
3012       } else {
3013          if (is_reporting_primitive_fields()) {
3014            address addr = (address)k + field->field_offset();
3015            int slot = field->field_index();
3016            if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
3017              delete field_map;
3018              return false;
3019           }
3020         }
3021       }
3022     }
3023     delete field_map;
3024 
3025     return true;
3026   }
3027 
3028   return true;
3029 }
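
// Agent-side view (illustrative sketch; the callback name is not from this
// file): in the advanced walk each report above arrives with a distinct
// jvmtiHeapReferenceKind - e.g. JVMTI_HEAP_REFERENCE_SUPERCLASS,
// JVMTI_HEAP_REFERENCE_CLASS_LOADER, JVMTI_HEAP_REFERENCE_CONSTANT_POOL or
// JVMTI_HEAP_REFERENCE_STATIC_FIELD - with the reference_info union carrying
// the constant pool or field index:
//
//   static jint JNICALL
//   class_ref_cb(jvmtiHeapReferenceKind kind,
//                const jvmtiHeapReferenceInfo* info,
//                jlong class_tag, jlong referrer_class_tag, jlong size,
//                jlong* tag_ptr, jlong* referrer_tag_ptr, jint length,
//                void* user_data) {
//     if (kind == JVMTI_HEAP_REFERENCE_STATIC_FIELD) {
//       jint slot = info->field.index;     // the slot reported above
//     }
//     return JVMTI_VISIT_OBJECTS;          // follow references from the target
//   }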
3030 
3031 // an object references a class and its instance fields
3032 // (static fields are ignored here as we report these as
3033 // references from the class).
3034 inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
3035   // reference to the class
3036   if (!CallbackInvoker::report_class_reference(o, Klass::cast(o->klass())->java_mirror())) {
3037     return false;
3038   }
3039 
3040   // iterate over instance fields
3041   ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
3042   for (int i=0; i<field_map->field_count(); i++) {
3043     ClassFieldDescriptor* field = field_map->field_at(i);
3044     char type = field->field_type();
3045     if (!is_primitive_field_type(type)) {
3046       oop fld_o = o->obj_field(field->field_offset());
3047       if (fld_o != NULL) {
3048         // reflection code may have a reference to a klassOop.
3049         // - see sun.reflect.UnsafeStaticFieldAccessorImpl and sun.misc.Unsafe
3050         if (fld_o->is_klass()) {
3051           klassOop k = (klassOop)fld_o;
3052           fld_o = Klass::cast(k)->java_mirror();
3053         }
3054         int slot = field->field_index();
3055         if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
3056           return false;
3057         }
3058       }
3059     } else {
3060       if (is_reporting_primitive_fields()) {
3061         // primitive instance field
3062         address addr = (address)o + field->field_offset();
3063         int slot = field->field_index();
3064         if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
3065           return false;
3066         }
3067       }
3068     }
3069   }
3070 
3071   // if the object is a java.lang.String
3072   if (is_reporting_string_values() &&
3073       o->klass() == SystemDictionary::String_klass()) {
3074     if (!CallbackInvoker::report_string_value(o)) {
3075       return false;
3076     }
3077   }
3078   return true;
3079 }
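
// Agent-side view (hedged sketch; the callback name is illustrative): if the
// agent registered a string_primitive_value_callback, report_string_value()
// above hands it the character contents of the String:
//
//   static jint JNICALL
//   string_cb(jlong class_tag, jlong size, jlong* tag_ptr,
//             const jchar* value, jint value_length, void* user_data) {
//     // value[0..value_length-1] are the String's characters
//     return 0;                        // continue the walk
//   }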
3080 
3081 
3082 // collects all simple (non-stack) roots.
3083 // if there's a heap root callback provided then the callback is
3084 // invoked for each simple root.
3085 // if an object reference callback is provided then all simple
3086 // roots are pushed onto the marking stack so that they can be
3087 // processed later
3088 //
3089 inline bool VM_HeapWalkOperation::collect_simple_roots() {
3090   SimpleRootsClosure blk;
3091 
3092   // JNI globals
3093   blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
3094   JNIHandles::oops_do(&blk);
3095   if (blk.stopped()) {
3096     return false;
3097   }
3098 
3099   // Preloaded classes and loader from the system dictionary
3100   blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
3101   SystemDictionary::always_strong_oops_do(&blk);
3102   if (blk.stopped()) {
3103     return false;
3104   }
3105 
3106   // Inflated monitors
3107   blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
3108   ObjectSynchronizer::oops_do(&blk);
3109   if (blk.stopped()) {
3110     return false;
3111   }
3112 
3113   // Threads
3114   for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
3115     oop threadObj = thread->threadObj();
3116     if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
3117       bool cont = CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD, threadObj);
3118       if (!cont) {
3119         return false;
3120       }
3121     }
3122   }
3123 
3124   // Other kinds of roots maintained by HotSpot
3125   // Many of these won't be visible but others (such as instances of important
3126   // exceptions) will be visible.
3127   blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3128   Universe::oops_do(&blk);
3129 
3130   // If there are any non-perm roots in the code cache, visit them.
3131   blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3132   CodeBlobToOopClosure look_in_blobs(&blk, false);
3133   CodeCache::scavenge_root_nmethods_do(&look_in_blobs);
3134 
3135   return true;
3136 }
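
// Agent-side view (illustrative sketch; the callback name is not from this
// file): in a basic heap walk the roots collected above are mapped to
// jvmtiHeapRootKind values such as JVMTI_HEAP_ROOT_JNI_GLOBAL,
// JVMTI_HEAP_ROOT_SYSTEM_CLASS, JVMTI_HEAP_ROOT_MONITOR,
// JVMTI_HEAP_ROOT_THREAD and JVMTI_HEAP_ROOT_OTHER and delivered through the
// heap root callback:
//
//   static jvmtiIterationControl JNICALL
//   root_cb(jvmtiHeapRootKind root_kind, jlong class_tag, jlong size,
//           jlong* tag_ptr, void* user_data) {
//     if (root_kind == JVMTI_HEAP_ROOT_JNI_GLOBAL) {
//       *tag_ptr = 1;                  // e.g. tag every JNI global root
//     }
//     return JVMTI_ITERATION_CONTINUE;
//   }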
3137 
3138 // Walk the stack of a given thread and find all references (locals
3139 // and JNI local references) and report these as stack references
3140 inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
3141                                                       JNILocalRootsClosure* blk)
3142 {
3143   oop threadObj = java_thread->threadObj();
3144   assert(threadObj != NULL, "sanity check");
3145 
3146   // only need to get the thread's tag once per thread
3147   jlong thread_tag = tag_for(_tag_map, threadObj);
3148 
3149   // also need the thread id
3150   jlong tid = java_lang_Thread::thread_id(threadObj);
3151 
3152 
3153   if (java_thread->has_last_Java_frame()) {
3154 
3155     // vframes are resource allocated
3156     Thread* current_thread = Thread::current();
3157     ResourceMark rm(current_thread);
3158     HandleMark hm(current_thread);
3159 
3160     RegisterMap reg_map(java_thread);
3161     frame f = java_thread->last_frame();
3162     vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);
3163 
3164     bool is_top_frame = true;
3165     int depth = 0;
3166     frame* last_entry_frame = NULL;
3167 
3168     while (vf != NULL) {
3169       if (vf->is_java_frame()) {
3170 
3171         // java frame (interpreted, compiled, ...)
3172         javaVFrame *jvf = javaVFrame::cast(vf);
3173 
3174         // the jmethodID
3175         jmethodID method = jvf->method()->jmethod_id();
3176 
3177         if (!(jvf->method()->is_native())) {
3178           jlocation bci = (jlocation)jvf->bci();
3179           StackValueCollection* locals = jvf->locals();
3180           for (int slot=0; slot<locals->size(); slot++) {
3181             if (locals->at(slot)->type() == T_OBJECT) {
3182               oop o = locals->obj_at(slot)();
3183               if (o == NULL) {
3184                 continue;
3185               }
3186 
3187               // stack reference
3188               if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
3189                                                    bci, slot, o)) {
3190                 return false;
3191               }
3192             }
3193           }
3194         } else {
3195           blk->set_context(thread_tag, tid, depth, method);
3196           if (is_top_frame) {
3197             // JNI locals for the top frame.
3198             java_thread->active_handles()->oops_do(blk);
3199           } else {
3200             if (last_entry_frame != NULL) {
3201               // JNI locals for the entry frame
3202               assert(last_entry_frame->is_entry_frame(), "checking");
3203               last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
3204             }
3205           }
3206         }
3207         last_entry_frame = NULL;
3208         depth++;
3209       } else {
3210         // externalVFrame - for an entry frame we report the JNI locals
3211         // when we find the corresponding javaVFrame
3212         frame* fr = vf->frame_pointer();
3213         assert(fr != NULL, "sanity check");
3214         if (fr->is_entry_frame()) {
3215           last_entry_frame = fr;
3216         }
3217       }
3218 
3219       vf = vf->sender();
3220       is_top_frame = false;
3221     }
3222   } else {
3223     // no last java frame but there may be JNI locals
3224     blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
3225     java_thread->active_handles()->oops_do(blk);
3226   }
3227   return true;
3228 }
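
// Agent-side view (hedged sketch; names are illustrative): in a basic heap
// walk the per-frame reports above arrive through the stack reference
// callback, carrying the thread tag, frame depth, method and local slot
// gathered in this function:
//
//   static jvmtiIterationControl JNICALL
//   stack_ref_cb(jvmtiHeapRootKind root_kind, jlong class_tag, jlong size,
//                jlong* tag_ptr, jlong thread_tag, jint depth,
//                jmethodID method, jint slot, void* user_data) {
//     // root_kind is JVMTI_HEAP_ROOT_STACK_LOCAL for locals and
//     // JVMTI_HEAP_ROOT_JNI_LOCAL for JNI local references
//     return JVMTI_ITERATION_CONTINUE;
//   }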
3229 
3230 
3231 // collects all stack roots - for each thread it walks the execution
3232 // stack to find all references and local JNI refs.
3233 inline bool VM_HeapWalkOperation::collect_stack_roots() {
3234   JNILocalRootsClosure blk;
3235   for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
3236     oop threadObj = thread->threadObj();
3237     if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
3238       if (!collect_stack_roots(thread, &blk)) {
3239         return false;
3240       }
3241     }
3242   }
3243   return true;
3244 }
3245 
3246 // visit an object
3247 // first mark the object as visited
3248 // second get all the outbound references from this object (in other words, all
3249 // the objects referenced by this object).
3250 //
3251 bool VM_HeapWalkOperation::visit(oop o) {
3252   // mark object as visited
3253   assert(!ObjectMarker::visited(o), "can't visit same object more than once");
3254   ObjectMarker::mark(o);
3255 
3256   // instance
3257   if (o->is_instance()) {
3258     if (o->klass() == SystemDictionary::Class_klass()) {
3259       o = klassOop_if_java_lang_Class(o);
3260       if (o->is_klass()) {
3261         // a java.lang.Class
3262         return iterate_over_class(klassOop(o));
3263       }
3264     } else {
3265       return iterate_over_object(o);
3266     }
3267   }
3268 
3269   // object array
3270   if (o->is_objArray()) {
3271     return iterate_over_array(o);
3272   }
3273 
3274   // type array
3275   if (o->is_typeArray()) {
3276     return iterate_over_type_array(o);
3277   }
3278 
3279   return true;
3280 }
3281 
3282 void VM_HeapWalkOperation::doit() {
3283   ResourceMark rm;
3284   ObjectMarkerController marker;
3285   ClassFieldMapCacheMark cm;
3286 
3287   assert(visit_stack()->is_empty(), "visit stack must be empty");
3288 
3289   // the heap walk starts with an initial object or the heap roots
3290   if (initial_object().is_null()) {
3291     if (!collect_simple_roots()) return;
3292     if (!collect_stack_roots()) return;
3293   } else {
3294     visit_stack()->push(initial_object()());
3295   }
3296 
3297   // object references required
3298   if (is_following_references()) {
3299 
3300     // visit each object until all reachable objects have been
3301     // visited or the callback asked to terminate the iteration.
3302     while (!visit_stack()->is_empty()) {
3303       oop o = visit_stack()->pop();
3304       if (!ObjectMarker::visited(o)) {
3305         if (!visit(o)) {
3306           break;
3307         }
3308       }
3309     }
3310   }
3311 }
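
// Stripped of VM types, doit() and visit() implement a plain iterative DFS
// over a mark set and a work stack. An illustrative sketch (not code used by
// the VM; report_and_push_references stands in for visit(), and <vector> and
// <set> are assumed):
//
//   std::vector<void*> stack;              // the visit stack
//   std::set<void*> visited;               // the ObjectMarker equivalent
//   stack.push_back(root);
//   while (!stack.empty()) {
//     void* o = stack.back(); stack.pop_back();
//     if (visited.insert(o).second) {      // not visited before
//       if (!report_and_push_references(o, &stack)) {
//         break;                           // a callback asked to terminate
//       }
//     }
//   }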
3312 
3313 // iterate over all objects that are reachable from a set of roots
3314 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
3315                                                  jvmtiStackReferenceCallback stack_ref_callback,
3316                                                  jvmtiObjectReferenceCallback object_ref_callback,
3317                                                  const void* user_data) {
3318   MutexLocker ml(Heap_lock);
3319   BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
3320   VM_HeapWalkOperation op(this, Handle(), context, user_data);
3321   VMThread::execute(&op);
3322 }
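
// Agent-side view (hedged sketch, reusing the hypothetical callback names
// from the sketches above): this backs the JVMTI IterateOverReachableObjects
// function:
//
//   jlong count = 0;
//   jvmtiError err = jvmti->IterateOverReachableObjects(
//       root_cb,            // jvmtiHeapRootCallback (may be NULL)
//       stack_ref_cb,       // jvmtiStackReferenceCallback (may be NULL)
//       count_array_refs,   // jvmtiObjectReferenceCallback (may be NULL)
//       &count);            // user_data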
3323 
3324 // iterate over all objects that are reachable from a given object
3325 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
3326                                                              jvmtiObjectReferenceCallback object_ref_callback,
3327                                                              const void* user_data) {
3328   oop obj = JNIHandles::resolve(object);
3329   Handle initial_object(Thread::current(), obj);
3330 
3331   MutexLocker ml(Heap_lock);
3332   BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
3333   VM_HeapWalkOperation op(this, initial_object, context, user_data);
3334   VMThread::execute(&op);
3335 }
3336 
3337 // follow references from an initial object or the GC roots
3338 void JvmtiTagMap::follow_references(jint heap_filter,
3339                                     KlassHandle klass,
3340                                     jobject object,
3341                                     const jvmtiHeapCallbacks* callbacks,
3342                                     const void* user_data)
3343 {
3344   oop obj = JNIHandles::resolve(object);
3345   Handle initial_object(Thread::current(), obj);
3346 
3347   MutexLocker ml(Heap_lock);
3348   AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
3349   VM_HeapWalkOperation op(this, initial_object, context, user_data);
3350   VMThread::execute(&op);
3351 }
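
// Agent-side view (hedged sketch, reusing the hypothetical callback names
// from the sketches above; memset assumes <string.h>): this backs the
// JVMTI 1.1 FollowReferences function:
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_reference_callback         = class_ref_cb;
//   callbacks.array_primitive_value_callback  = prim_array_cb;
//   callbacks.string_primitive_value_callback = string_cb;
//   jvmtiError err = jvmti->FollowReferences(
//       0,                  // heap_filter, e.g. JVMTI_HEAP_FILTER_UNTAGGED
//       NULL,               // klass - NULL means don't filter by class
//       NULL,               // initial_object - NULL means start at the heap roots
//       &callbacks,
//       NULL);              // user_data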
3352 
3353 
3354 // called post-GC
3355 // - for each JVMTI environment with an object tag map, call its rehash
3356 // function to re-sync with the new object locations.
3357 void JvmtiTagMap::gc_epilogue(bool full) {
3358   assert(SafepointSynchronize::is_at_safepoint(), "must be executed at a safepoint");
3359   if (JvmtiEnv::environments_might_exist()) {
3360     // re-obtain the memory region for the young generation (might have
3361     // changed due to the adaptive resizing policy)
3362     get_young_generation();
3363 
3364     JvmtiEnvIterator it;
3365     for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
3366       JvmtiTagMap* tag_map = env->tag_map();
3367       if (tag_map != NULL && !tag_map->is_empty()) {
3368         TraceTime t(full ? "JVMTI Full Rehash " : "JVMTI Rehash ", TraceJVMTIObjectTagging);
3369         if (full) {
3370           tag_map->rehash(0, n_hashmaps);
3371         } else {
3372           tag_map->rehash(0, 0);        // tag map for young gen only
3373         }
3374       }
3375     }
3376   }
3377 }
3378 
3379 // CMS has completed reference processing so we may have JNI weak refs
3380 // to objects in the CMS generation that have been GC'ed.
3381 void JvmtiTagMap::cms_ref_processing_epilogue() {
3382   assert(SafepointSynchronize::is_at_safepoint(), "must be executed at a safepoint");
3383   assert(UseConcMarkSweepGC, "should only be used with CMS");
3384   if (JvmtiEnv::environments_might_exist()) {
3385     JvmtiEnvIterator it;
3386     for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
3387       JvmtiTagMap* tag_map = ((JvmtiEnvBase *)env)->tag_map();
3388       if (tag_map != NULL && !tag_map->is_empty()) {
3389         TraceTime t("JVMTI Rehash (CMS) ", TraceJVMTIObjectTagging);
3390         tag_map->rehash(1, n_hashmaps);    // assume CMS not used in young gen
3391       }
3392     }
3393   }
3394 }
3395 
3396 
3397 // For each entry in the hashmaps 'start' to 'end':
3398 //
3399 // 1. resolve the JNI weak reference
3400 //
3401 // 2. If it resolves to NULL it means the object has been freed so the entry
3402 //    is removed, the weak reference destroyed, and the object free event is
3403 //    posted (if enabled).
3404 //
3405 // 3. If the weak reference resolves to an object then we re-hash the object
3406 //    to see if it has moved or has been promoted (from the young to the old
3407 //    generation for example).
3408 //
3409 void JvmtiTagMap::rehash(int start, int end) {
3410 
3411   // does this environment have the OBJECT_FREE event enabled
3412   bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);
3413 
3414   // counters used for trace message
3415   int freed = 0;
3416   int moved = 0;
3417   int promoted = 0;
3418 
3419   // we assume there are two hashmaps - one for the young generation
3420   // and the other for all other spaces.
3421   assert(n_hashmaps == 2, "not implemented");
3422   JvmtiTagHashmap* young_hashmap = _hashmap[0];
3423   JvmtiTagHashmap* other_hashmap = _hashmap[1];
3424 
3425   // re-enable resizing (if disabled)
3426   young_hashmap->set_resizing_enabled(true);
3427   other_hashmap->set_resizing_enabled(true);
3428 
3429   // when re-hashing the hashmap corresponding to the young generation we
3430   // collect the entries corresponding to objects that have been promoted.
3431   JvmtiTagHashmapEntry* promoted_entries = NULL;
3432 
3433   if (end >= n_hashmaps) {
3434     end = n_hashmaps - 1;
3435   }
3436 
3437   for (int i=start; i <= end; i++) {
3438     JvmtiTagHashmap* hashmap = _hashmap[i];
3439 
3440     // if the hashmap is empty then we can skip it
3441     if (hashmap->_entry_count == 0) {
3442       continue;
3443     }
3444 
3445     // now iterate through each entry in the table
3446 
3447     JvmtiTagHashmapEntry** table = hashmap->table();
3448     int size = hashmap->size();
3449 
3450     for (int pos=0; pos<size; pos++) {
3451       JvmtiTagHashmapEntry* entry = table[pos];
3452       JvmtiTagHashmapEntry* prev = NULL;
3453 
3454       while (entry != NULL) {
3455         JvmtiTagHashmapEntry* next = entry->next();
3456 
3457         jweak ref = entry->object();
3458         oop obj = JNIHandles::resolve(ref);
3459 
3460         // has object been GC'ed
3461         if (obj == NULL) {
3462           // grab the tag
3463           jlong tag = entry->tag();
3464           guarantee(tag != 0, "checking");
3465 
3466           // remove GC'ed entry from hashmap and return the
3467           // entry to the free list
3468           hashmap->remove(prev, pos, entry);
3469           destroy_entry(entry);
3470 
3471           // destroy the weak ref
3472           JNIHandles::destroy_weak_global(ref);
3473 
3474           // post the event to the profiler
3475           if (post_object_free) {
3476             JvmtiExport::post_object_free(env(), tag);
3477           }
3478 
3479           freed++;
3480           entry = next;
3481           continue;
3482         }
3483 
3484         // if this is the young hashmap then the object is either promoted
3485         // or moved.
3486         // if this is the other hashmap then the object is moved.
3487 
3488         bool same_gen;
3489         if (i == 0) {
3490           assert(hashmap == young_hashmap, "checking");
3491           same_gen = is_in_young(obj);
3492         } else {
3493           same_gen = true;
3494         }
3495 
3496 
3497         if (same_gen) {
3498           // if the object has moved then re-hash it and move its
3499           // entry to its new location.
3500           unsigned int new_pos = JvmtiTagHashmap::hash(obj, size);
3501           if (new_pos != (unsigned int)pos) {
3502             if (prev == NULL) {
3503               table[pos] = next;
3504             } else {
3505               prev->set_next(next);
3506             }
3507             entry->set_next(table[new_pos]);
3508             table[new_pos] = entry;
3509             moved++;
3510           } else {
3511             // object didn't move
3512             prev = entry;
3513           }
3514         } else {
3515           // object has been promoted so remove the entry from the
3516           // young hashmap
3517           assert(hashmap == young_hashmap, "checking");
3518           hashmap->remove(prev, pos, entry);
3519 
3520           // move the entry to the promoted list
3521           entry->set_next(promoted_entries);
3522           promoted_entries = entry;
3523         }
3524 
3525         entry = next;
3526       }
3527     }
3528   }
3529 
3530 
3531   // add the entries, corresponding to the promoted objects, to the
3532   // other hashmap.
3533   JvmtiTagHashmapEntry* entry = promoted_entries;
3534   while (entry != NULL) {
3535     oop o = JNIHandles::resolve(entry->object());
3536     assert(hashmap_for(o) == other_hashmap, "checking");
3537     JvmtiTagHashmapEntry* next = entry->next();
3538     other_hashmap->add(o, entry);
3539     entry = next;
3540     promoted++;
3541   }
3542 
3543   // stats
3544   if (TraceJVMTIObjectTagging) {
3545     int total_moves = promoted + moved;
3546 
3547     int post_total = 0;
3548     for (int i=0; i<n_hashmaps; i++) {
3549       post_total += _hashmap[i]->_entry_count;
3550     }
3551     int pre_total = post_total + freed;
3552 
3553     tty->print("(%d->%d, %d freed, %d promoted, %d total moves)",
3554         pre_total, post_total, freed, promoted, total_moves);
3555   }
3556 }
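
// With -XX:+TraceJVMTIObjectTagging the print above emits a summary such as
// the following (the numbers are made up; the format comes from the print
// statement):
//
//   (1500->1450, 50 freed, 12 promoted, 380 total moves)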