src/share/vm/prims/jvmtiTagMap.cpp

author:      phh
date:        Fri, 07 Jan 2011 10:42:32 -0500
changeset:   2423:b1a2afa37ec4
parent:      2314:f95d63e2154a
child:       2445:7246a374a9f2
permissions: -rw-r--r--

7003271: Hotspot should track cumulative Java heap bytes allocated on a per-thread basis
Summary: Track allocated bytes in Thread's, update on TLAB retirement and direct allocation in Eden and tenured, add JNI methods for ThreadMXBean.
Reviewed-by: coleenp, kvn, dholmes, ysr

     1 /*
     2  * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
     3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4  *
     5  * This code is free software; you can redistribute it and/or modify it
     6  * under the terms of the GNU General Public License version 2 only, as
     7  * published by the Free Software Foundation.
     8  *
     9  * This code is distributed in the hope that it will be useful, but WITHOUT
    10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    12  * version 2 for more details (a copy is included in the LICENSE file that
    13  * accompanied this code).
    14  *
    15  * You should have received a copy of the GNU General Public License version
    16  * 2 along with this work; if not, write to the Free Software Foundation,
    17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    18  *
    19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
    20  * or visit www.oracle.com if you need additional information or have any
    21  * questions.
    22  *
    23  */
    25 #include "precompiled.hpp"
    26 #include "classfile/symbolTable.hpp"
    27 #include "classfile/systemDictionary.hpp"
    28 #include "classfile/vmSymbols.hpp"
    29 #include "jvmtifiles/jvmtiEnv.hpp"
    30 #include "oops/objArrayKlass.hpp"
    31 #include "oops/oop.inline2.hpp"
    32 #include "prims/jvmtiEventController.hpp"
    33 #include "prims/jvmtiEventController.inline.hpp"
    34 #include "prims/jvmtiExport.hpp"
    35 #include "prims/jvmtiImpl.hpp"
    36 #include "prims/jvmtiTagMap.hpp"
    37 #include "runtime/biasedLocking.hpp"
    38 #include "runtime/javaCalls.hpp"
    39 #include "runtime/jniHandles.hpp"
    40 #include "runtime/mutex.hpp"
    41 #include "runtime/mutexLocker.hpp"
    42 #include "runtime/reflectionUtils.hpp"
    43 #include "runtime/vframe.hpp"
    44 #include "runtime/vmThread.hpp"
    45 #include "runtime/vm_operations.hpp"
    46 #include "services/serviceUtil.hpp"
    47 #ifndef SERIALGC
    48 #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp"
    49 #endif
    51 // JvmtiTagHashmapEntry
    52 //
    53 // Each entry encapsulates a JNI weak reference to the tagged object
    54 // and the tag value. In addition an entry includes a next pointer which
    55 // is used to chain entries together.
    57 class JvmtiTagHashmapEntry : public CHeapObj {
    58  private:
    59   friend class JvmtiTagMap;
    61   jweak _object;                        // JNI weak ref to tagged object
    62   jlong _tag;                           // the tag
    63   JvmtiTagHashmapEntry* _next;          // next on the list
    65   inline void init(jweak object, jlong tag) {
    66     _object = object;
    67     _tag = tag;
    68     _next = NULL;
    69   }
    71   // constructor
    72   JvmtiTagHashmapEntry(jweak object, jlong tag)         { init(object, tag); }
    74  public:
    76   // accessor methods
    77   inline jweak object() const                           { return _object; }
    78   inline jlong tag() const                              { return _tag; }
    80   inline void set_tag(jlong tag) {
    81     assert(tag != 0, "can't be zero");
    82     _tag = tag;
    83   }
    85   inline JvmtiTagHashmapEntry* next() const             { return _next; }
    86   inline void set_next(JvmtiTagHashmapEntry* next)      { _next = next; }
    87 };
    90 // JvmtiTagHashmap
    91 //
    92 // A hashmap is essentially a table of pointers to entries. Entries
    93 // are hashed to a location, or position in the table, and then
    94 // chained from that location. The "key" for hashing is address of
    95 // the object, or oop. The "value" is the JNI weak reference to the
    96 // object and the tag value. Keys are not stored with the entry.
    97 // Instead the weak reference is resolved to obtain the key.
    98 //
    99 // A hashmap maintains a count of the number of entries in the hashmap
   100 // and resizes if the number of entries exceeds a given threshold.
   101 // The threshold is specified as a percentage of the size - for
   102 // example a threshold of 0.75 will trigger the hashmap to resize
   103 // if the number of entries is >75% of table size.
   104 //
   105 // A hashmap provides functions for adding, removing, and finding
   106 // entries. It also provides a function to iterate over all entries
   107 // in the hashmap.
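        // Illustrative note (not in the original source): with the defaults used by
        // init() below (initial size 4801 and load factor 4.0f), the resize threshold
        // is (int)(4.0 * 4801) = 19204, so the table grows to the next size in _sizes[]
        // once more than 19204 chained entries are present.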
   109 class JvmtiTagHashmap : public CHeapObj {
   110  private:
   111   friend class JvmtiTagMap;
   113   enum {
   114     small_trace_threshold  = 10000,                  // threshold for tracing
   115     medium_trace_threshold = 100000,
   116     large_trace_threshold  = 1000000,
   117     initial_trace_threshold = small_trace_threshold
   118   };
   120   static int _sizes[];                  // array of possible hashmap sizes
   121   int _size;                            // actual size of the table
   122   int _size_index;                      // index into size table
   124   int _entry_count;                     // number of entries in the hashmap
   126   float _load_factor;                   // load factor as a % of the size
   127   int _resize_threshold;                // computed threshold to trigger resizing.
   128   bool _resizing_enabled;               // indicates if hashmap can resize
   130   int _trace_threshold;                 // threshold for trace messages
   132   JvmtiTagHashmapEntry** _table;        // the table of entries.
   134   // private accessors
   135   int resize_threshold() const                  { return _resize_threshold; }
   136   int trace_threshold() const                   { return _trace_threshold; }
   138   // initialize the hashmap
   139   void init(int size_index=0, float load_factor=4.0f) {
   140     int initial_size =  _sizes[size_index];
   141     _size_index = size_index;
   142     _size = initial_size;
   143     _entry_count = 0;
   144     if (TraceJVMTIObjectTagging) {
   145       _trace_threshold = initial_trace_threshold;
   146     } else {
   147       _trace_threshold = -1;
   148     }
   149     _load_factor = load_factor;
   150     _resize_threshold = (int)(_load_factor * _size);
   151     _resizing_enabled = true;
   152     size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
   153     _table = (JvmtiTagHashmapEntry**)os::malloc(s);
   154     if (_table == NULL) {
   155       vm_exit_out_of_memory(s, "unable to allocate initial hashtable for jvmti object tags");
   156     }
   157     for (int i=0; i<initial_size; i++) {
   158       _table[i] = NULL;
   159     }
   160   }
   162   // hash a given key (oop) with the specified size
   163   static unsigned int hash(oop key, int size) {
   164     // shift right to get better distribution (as these bits will be zero
   165     // with aligned addresses)
   166     unsigned int addr = (unsigned int)((intptr_t)key);
   167 #ifdef _LP64
   168     return (addr >> 3) % size;
   169 #else
   170     return (addr >> 2) % size;
   171 #endif
   172   }
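          // Illustrative worked example (not in the original source), assuming a 64-bit
          // VM and the initial table size of 4801:
          //   key (oop address), low 32 bits  = 0x2c001230   (8-byte aligned)
          //   shifted right by 3              = 0x05800246
          //   bucket index                    = 0x05800246 % 4801
          // The shift discards alignment bits that are always zero for object addresses.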
   174   // hash a given key (oop)
   175   unsigned int hash(oop key) {
   176     return hash(key, _size);
   177   }
    179   // resize the hashmap - allocates a larger table and re-hashes
   180   // all entries into the new table.
   181   void resize() {
   182     int new_size_index = _size_index+1;
   183     int new_size = _sizes[new_size_index];
   184     if (new_size < 0) {
   185       // hashmap already at maximum capacity
   186       return;
   187     }
   189     // allocate new table
   190     size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
   191     JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s);
   192     if (new_table == NULL) {
   193       warning("unable to allocate larger hashtable for jvmti object tags");
   194       set_resizing_enabled(false);
   195       return;
   196     }
   198     // initialize new table
   199     int i;
   200     for (i=0; i<new_size; i++) {
   201       new_table[i] = NULL;
   202     }
   204     // rehash all entries into the new table
   205     for (i=0; i<_size; i++) {
   206       JvmtiTagHashmapEntry* entry = _table[i];
   207       while (entry != NULL) {
   208         JvmtiTagHashmapEntry* next = entry->next();
   209         oop key = JNIHandles::resolve(entry->object());
   210         assert(key != NULL, "jni weak reference cleared!!");
   211         unsigned int h = hash(key, new_size);
   212         JvmtiTagHashmapEntry* anchor = new_table[h];
   213         if (anchor == NULL) {
   214           new_table[h] = entry;
   215           entry->set_next(NULL);
   216         } else {
   217           entry->set_next(anchor);
   218           new_table[h] = entry;
   219         }
   220         entry = next;
   221       }
   222     }
   224     // free old table and update settings.
   225     os::free((void*)_table);
   226     _table = new_table;
   227     _size_index = new_size_index;
   228     _size = new_size;
   230     // compute new resize threshold
   231     _resize_threshold = (int)(_load_factor * _size);
   232   }
   235   // internal remove function - remove an entry at a given position in the
   236   // table.
   237   inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
   238     assert(pos >= 0 && pos < _size, "out of range");
   239     if (prev == NULL) {
   240       _table[pos] = entry->next();
   241     } else {
   242       prev->set_next(entry->next());
   243     }
   244     assert(_entry_count > 0, "checking");
   245     _entry_count--;
   246   }
   248   // resizing switch
   249   bool is_resizing_enabled() const          { return _resizing_enabled; }
   250   void set_resizing_enabled(bool enable)    { _resizing_enabled = enable; }
   252   // debugging
   253   void print_memory_usage();
   254   void compute_next_trace_threshold();
   256  public:
   258   // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
    259   // The preferred size is rounded up to the nearest actual size (or capped at the largest).
   260   JvmtiTagHashmap(int size, float load_factor=0.0f) {
   261     int i=0;
   262     while (_sizes[i] < size) {
   263       if (_sizes[i] < 0) {
   264         assert(i > 0, "sanity check");
   265         i--;
   266         break;
   267       }
   268       i++;
   269     }
   271     // if a load factor is specified then use it, otherwise use default
   272     if (load_factor > 0.01f) {
   273       init(i, load_factor);
   274     } else {
   275       init(i);
   276     }
   277   }
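          // Hypothetical usage example (not in the original source): JvmtiTagHashmap(10000)
          // walks _sizes[] past 4801 and stops at 76831, the first listed prime that is
          // >= the preferred size; a request larger than 78643219 is capped at that size.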
   279   // create a JvmtiTagHashmap with default settings
   280   JvmtiTagHashmap() {
   281     init();
   282   }
   284   // release table when JvmtiTagHashmap destroyed
   285   ~JvmtiTagHashmap() {
   286     if (_table != NULL) {
   287       os::free((void*)_table);
   288       _table = NULL;
   289     }
   290   }
   292   // accessors
   293   int size() const                              { return _size; }
   294   JvmtiTagHashmapEntry** table() const          { return _table; }
   295   int entry_count() const                       { return _entry_count; }
   297   // find an entry in the hashmap, returns NULL if not found.
   298   inline JvmtiTagHashmapEntry* find(oop key) {
   299     unsigned int h = hash(key);
   300     JvmtiTagHashmapEntry* entry = _table[h];
   301     while (entry != NULL) {
   302       oop orig_key = JNIHandles::resolve(entry->object());
   303       assert(orig_key != NULL, "jni weak reference cleared!!");
   304       if (key == orig_key) {
   305         break;
   306       }
   307       entry = entry->next();
   308     }
   309     return entry;
   310   }
   313   // add a new entry to hashmap
   314   inline void add(oop key, JvmtiTagHashmapEntry* entry) {
   315     assert(key != NULL, "checking");
   316     assert(find(key) == NULL, "duplicate detected");
   317     unsigned int h = hash(key);
   318     JvmtiTagHashmapEntry* anchor = _table[h];
   319     if (anchor == NULL) {
   320       _table[h] = entry;
   321       entry->set_next(NULL);
   322     } else {
   323       entry->set_next(anchor);
   324       _table[h] = entry;
   325     }
   327     _entry_count++;
   328     if (trace_threshold() > 0 && entry_count() >= trace_threshold()) {
   329       assert(TraceJVMTIObjectTagging, "should only get here when tracing");
   330       print_memory_usage();
   331       compute_next_trace_threshold();
   332     }
    334     // if the number of entries exceeds the threshold then resize
   335     if (entry_count() > resize_threshold() && is_resizing_enabled()) {
   336       resize();
   337     }
   338   }
   340   // remove an entry with the given key.
   341   inline JvmtiTagHashmapEntry* remove(oop key) {
   342     unsigned int h = hash(key);
   343     JvmtiTagHashmapEntry* entry = _table[h];
   344     JvmtiTagHashmapEntry* prev = NULL;
   345     while (entry != NULL) {
   346       oop orig_key = JNIHandles::resolve(entry->object());
   347       assert(orig_key != NULL, "jni weak reference cleared!!");
   348       if (key == orig_key) {
   349         break;
   350       }
   351       prev = entry;
   352       entry = entry->next();
   353     }
   354     if (entry != NULL) {
   355       remove(prev, h, entry);
   356     }
   357     return entry;
   358   }
   360   // iterate over all entries in the hashmap
   361   void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
   362 };
   364 // possible hashmap sizes - odd primes that roughly double in size.
    365 // To avoid excessive resizing, the odd primes between 4801 and 76831 and
    366 // between 76831 and 307261 have been removed. The list must be terminated by -1.
   367 int JvmtiTagHashmap::_sizes[] =  { 4801, 76831, 307261, 614563, 1228891,
   368     2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 };
    371 // A supporting class for iterating over all entries in the hashmap
   372 class JvmtiTagHashmapEntryClosure {
   373  public:
   374   virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0;
   375 };
   378 // iterate over all entries in the hashmap
   379 void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
   380   for (int i=0; i<_size; i++) {
   381     JvmtiTagHashmapEntry* entry = _table[i];
   382     JvmtiTagHashmapEntry* prev = NULL;
   383     while (entry != NULL) {
   384       // obtain the next entry before invoking do_entry - this is
   385       // necessary because do_entry may remove the entry from the
   386       // hashmap.
   387       JvmtiTagHashmapEntry* next = entry->next();
   388       closure->do_entry(entry);
   389       entry = next;
   390      }
   391   }
   392 }
   394 // debugging
   395 void JvmtiTagHashmap::print_memory_usage() {
   396   intptr_t p = (intptr_t)this;
   397   tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p);
   399   // table + entries in KB
   400   int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) +
   401     entry_count()*sizeof(JvmtiTagHashmapEntry))/K;
   403   int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K);
   404   tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]",
   405     entry_count(), hashmap_usage, weak_globals_usage);
   406 }
   408 // compute threshold for the next trace message
   409 void JvmtiTagHashmap::compute_next_trace_threshold() {
   410   if (trace_threshold() < medium_trace_threshold) {
   411     _trace_threshold += small_trace_threshold;
   412   } else {
   413     if (trace_threshold() < large_trace_threshold) {
   414       _trace_threshold += medium_trace_threshold;
   415     } else {
   416       _trace_threshold += large_trace_threshold;
   417     }
   418   }
   419 }
   421 // memory region for young generation
   422 MemRegion JvmtiTagMap::_young_gen;
   424 // get the memory region used for the young generation
   425 void JvmtiTagMap::get_young_generation() {
   426   CollectedHeap* ch = Universe::heap();
   427   switch (ch->kind()) {
   428     case (CollectedHeap::GenCollectedHeap): {
   429       _young_gen = ((GenCollectedHeap*)ch)->get_gen(0)->reserved();
   430       break;
   431     }
   432 #ifndef SERIALGC
   433     case (CollectedHeap::ParallelScavengeHeap): {
   434       _young_gen = ((ParallelScavengeHeap*)ch)->young_gen()->reserved();
   435       break;
   436     }
   437     case (CollectedHeap::G1CollectedHeap): {
   438       // Until a more satisfactory solution is implemented, all
   439       // oops in the tag map will require rehash at each gc.
   440       // This is a correct, if extremely inefficient solution.
   441       // See RFE 6621729 for related commentary.
   442       _young_gen = ch->reserved_region();
   443       break;
   444     }
   445 #endif  // !SERIALGC
   446     default:
   447       ShouldNotReachHere();
   448   }
   449 }
   451 // returns true if oop is in the young generation
   452 inline bool JvmtiTagMap::is_in_young(oop o) {
   453   assert(_young_gen.start() != NULL, "checking");
   454   void* p = (void*)o;
   455   bool in_young = _young_gen.contains(p);
   456   return in_young;
   457 }
   459 // returns the appropriate hashmap for a given object
   460 inline JvmtiTagHashmap* JvmtiTagMap::hashmap_for(oop o) {
   461   if (is_in_young(o)) {
   462     return _hashmap[0];
   463   } else {
   464     return _hashmap[1];
   465   }
   466 }
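        // Note (not in the original source): splitting entries between a young-generation
        // map and a map for everything else presumably lets the entries whose objects move
        // at every minor collection be rehashed separately from the rest; the rehashing
        // code that relies on this split is outside this excerpt.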
   469 // create a JvmtiTagMap
   470 JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
   471   _env(env),
   472   _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false),
   473   _free_entries(NULL),
   474   _free_entries_count(0)
   475 {
   476   assert(JvmtiThreadState_lock->is_locked(), "sanity check");
   477   assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");
   479   // create the hashmaps
   480   for (int i=0; i<n_hashmaps; i++) {
   481     _hashmap[i] = new JvmtiTagHashmap();
   482   }
   484   // get the memory region used by the young generation
   485   get_young_generation();
   487   // finally add us to the environment
   488   ((JvmtiEnvBase *)env)->set_tag_map(this);
   489 }
   492 // destroy a JvmtiTagMap
   493 JvmtiTagMap::~JvmtiTagMap() {
   495   // no lock acquired as we assume the enclosing environment is
    496   // also being destroyed.
   497   ((JvmtiEnvBase *)_env)->set_tag_map(NULL);
   499   // iterate over the hashmaps and destroy each of the entries
   500   for (int i=0; i<n_hashmaps; i++) {
   501     JvmtiTagHashmap* hashmap = _hashmap[i];
   502     JvmtiTagHashmapEntry** table = hashmap->table();
   503     for (int j=0; j<hashmap->size(); j++) {
   504       JvmtiTagHashmapEntry *entry = table[j];
   505       while (entry != NULL) {
   506         JvmtiTagHashmapEntry* next = entry->next();
   507         jweak ref = entry->object();
   508         JNIHandles::destroy_weak_global(ref);
   509         delete entry;
   510         entry = next;
   511       }
   512     }
   514     // finally destroy the hashmap
   515     delete hashmap;
   516   }
   518   // remove any entries on the free list
   519   JvmtiTagHashmapEntry* entry = _free_entries;
   520   while (entry != NULL) {
   521     JvmtiTagHashmapEntry* next = entry->next();
   522     delete entry;
   523     entry = next;
   524   }
   525 }
   527 // create a hashmap entry
   528 // - if there's an entry on the (per-environment) free list then this
    529 // is returned. Otherwise a new entry is allocated.
   530 JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(jweak ref, jlong tag) {
   531   assert(Thread::current()->is_VM_thread() || is_locked(), "checking");
   532   JvmtiTagHashmapEntry* entry;
   533   if (_free_entries == NULL) {
   534     entry = new JvmtiTagHashmapEntry(ref, tag);
   535   } else {
   536     assert(_free_entries_count > 0, "mismatched _free_entries_count");
   537     _free_entries_count--;
   538     entry = _free_entries;
   539     _free_entries = entry->next();
   540     entry->init(ref, tag);
   541   }
   542   return entry;
   543 }
   545 // destroy an entry by returning it to the free list
   546 void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
   547   assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
   548   // limit the size of the free list
   549   if (_free_entries_count >= max_free_entries) {
   550     delete entry;
   551   } else {
   552     entry->set_next(_free_entries);
   553     _free_entries = entry;
   554     _free_entries_count++;
   555   }
   556 }
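        // Note (not in the original source): entries are C-heap allocations, so the bounded
        // free list above presumably exists to avoid a malloc/free round trip for every
        // tag/untag performed during a heap walk; max_free_entries caps how many recycled
        // entries an environment keeps parked on the list.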
    558 // returns the tag map for the given environment. If the tag map
   559 // doesn't exist then it is created.
   560 JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
   561   JvmtiTagMap* tag_map = ((JvmtiEnvBase *)env)->tag_map();
   562   if (tag_map == NULL) {
   563     MutexLocker mu(JvmtiThreadState_lock);
   564     tag_map = ((JvmtiEnvBase *)env)->tag_map();
   565     if (tag_map == NULL) {
   566       tag_map = new JvmtiTagMap(env);
   567     }
   568   } else {
   569     CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
   570   }
   571   return tag_map;
   572 }
   574 // iterate over all entries in the tag map.
   575 void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
   576   for (int i=0; i<n_hashmaps; i++) {
   577     JvmtiTagHashmap* hashmap = _hashmap[i];
   578     hashmap->entry_iterate(closure);
   579   }
   580 }
   582 // returns true if the hashmaps are empty
   583 bool JvmtiTagMap::is_empty() {
   584   assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
   585   assert(n_hashmaps == 2, "not implemented");
   586   return ((_hashmap[0]->entry_count() == 0) && (_hashmap[1]->entry_count() == 0));
   587 }
   590 // Return the tag value for an object, or 0 if the object is
   591 // not tagged
   592 //
   593 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
   594   JvmtiTagHashmapEntry* entry = tag_map->hashmap_for(o)->find(o);
   595   if (entry == NULL) {
   596     return 0;
   597   } else {
   598     return entry->tag();
   599   }
   600 }
   602 // If the object is a java.lang.Class then return the klassOop,
   603 // otherwise return the original object
   604 static inline oop klassOop_if_java_lang_Class(oop o) {
   605   if (o->klass() == SystemDictionary::Class_klass()) {
   606     if (!java_lang_Class::is_primitive(o)) {
   607       o = (oop)java_lang_Class::as_klassOop(o);
   608       assert(o != NULL, "class for non-primitive mirror must exist");
   609     }
   610   }
   611   return o;
   612 }
   614 // A CallbackWrapper is a support class for querying and tagging an object
   615 // around a callback to a profiler. The constructor does pre-callback
   616 // work to get the tag value, klass tag value, ... and the destructor
   617 // does the post-callback work of tagging or untagging the object.
   618 //
   619 // {
   620 //   CallbackWrapper wrapper(tag_map, o);
   621 //
   622 //   (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
   623 //
   624 // } // wrapper goes out of scope here which results in the destructor
   625 //      checking to see if the object has been tagged, untagged, or the
   626 //      tag value has changed.
   627 //
   628 class CallbackWrapper : public StackObj {
   629  private:
   630   JvmtiTagMap* _tag_map;
   631   JvmtiTagHashmap* _hashmap;
   632   JvmtiTagHashmapEntry* _entry;
   633   oop _o;
   634   jlong _obj_size;
   635   jlong _obj_tag;
   636   klassOop _klass;         // the object's class
   637   jlong _klass_tag;
   639  protected:
   640   JvmtiTagMap* tag_map() const      { return _tag_map; }
   642   // invoked post-callback to tag, untag, or update the tag of an object
   643   void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
   644                                        JvmtiTagHashmapEntry* entry, jlong obj_tag);
   645  public:
   646   CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
   647     assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
   648            "MT unsafe or must be VM thread");
   650     // for Classes the klassOop is tagged
   651     _o = klassOop_if_java_lang_Class(o);
   653     // object size
   654     _obj_size = _o->size() * wordSize;
   656     // record the context
   657     _tag_map = tag_map;
   658     _hashmap = tag_map->hashmap_for(_o);
   659     _entry = _hashmap->find(_o);
   661     // get object tag
   662     _obj_tag = (_entry == NULL) ? 0 : _entry->tag();
   664     // get the class and the class's tag value
   665     if (_o == o) {
   666       _klass = _o->klass();
   667     } else {
   668       // if the object represents a runtime class then use the
   669       // tag for java.lang.Class
   670       _klass = SystemDictionary::Class_klass();
   671     }
   672     _klass_tag = tag_for(tag_map, _klass);
   673   }
   675   ~CallbackWrapper() {
   676     post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
   677   }
   679   inline jlong* obj_tag_p()                     { return &_obj_tag; }
   680   inline jlong obj_size() const                 { return _obj_size; }
   681   inline jlong obj_tag() const                  { return _obj_tag; }
   682   inline klassOop klass() const                 { return _klass; }
   683   inline jlong klass_tag() const                { return _klass_tag; }
   684 };
    688 // invoked post-callback to tag, untag, or update the tag of an object
   689 void inline CallbackWrapper::post_callback_tag_update(oop o,
   690                                                       JvmtiTagHashmap* hashmap,
   691                                                       JvmtiTagHashmapEntry* entry,
   692                                                       jlong obj_tag) {
   693   if (entry == NULL) {
   694     if (obj_tag != 0) {
   695       // callback has tagged the object
   696       assert(Thread::current()->is_VM_thread(), "must be VMThread");
   697       HandleMark hm;
   698       Handle h(o);
   699       jweak ref = JNIHandles::make_weak_global(h);
   700       entry = tag_map()->create_entry(ref, obj_tag);
   701       hashmap->add(o, entry);
   702     }
   703   } else {
   704     // object was previously tagged - the callback may have untagged
   705     // the object or changed the tag value
   706     if (obj_tag == 0) {
   707       jweak ref = entry->object();
   709       JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
   710       assert(entry_removed == entry, "checking");
   711       tag_map()->destroy_entry(entry);
   713       JNIHandles::destroy_weak_global(ref);
   714     } else {
   715       if (obj_tag != entry->tag()) {
   716          entry->set_tag(obj_tag);
   717       }
   718     }
   719   }
   720 }
   722 // An extended CallbackWrapper used when reporting an object reference
   723 // to the agent.
   724 //
   725 // {
   726 //   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
   727 //
   728 //   (*callback)(wrapper.klass_tag(),
   729 //               wrapper.obj_size(),
    730 //               wrapper.obj_tag_p(),
   731 //               wrapper.referrer_tag_p(), ...)
   732 //
   733 // } // wrapper goes out of scope here which results in the destructor
   734 //      checking to see if the referrer object has been tagged, untagged,
   735 //      or the tag value has changed.
   736 //
   737 class TwoOopCallbackWrapper : public CallbackWrapper {
   738  private:
   739   bool _is_reference_to_self;
   740   JvmtiTagHashmap* _referrer_hashmap;
   741   JvmtiTagHashmapEntry* _referrer_entry;
   742   oop _referrer;
   743   jlong _referrer_obj_tag;
   744   jlong _referrer_klass_tag;
   745   jlong* _referrer_tag_p;
   747   bool is_reference_to_self() const             { return _is_reference_to_self; }
   749  public:
   750   TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
   751     CallbackWrapper(tag_map, o)
   752   {
   753     // self reference needs to be handled in a special way
   754     _is_reference_to_self = (referrer == o);
   756     if (_is_reference_to_self) {
   757       _referrer_klass_tag = klass_tag();
   758       _referrer_tag_p = obj_tag_p();
   759     } else {
   760       // for Classes the klassOop is tagged
   761       _referrer = klassOop_if_java_lang_Class(referrer);
   762       // record the context
   763       _referrer_hashmap = tag_map->hashmap_for(_referrer);
   764       _referrer_entry = _referrer_hashmap->find(_referrer);
   766       // get object tag
   767       _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
   768       _referrer_tag_p = &_referrer_obj_tag;
   770       // get referrer class tag.
   771       klassOop k = (_referrer == referrer) ?  // Check if referrer is a class...
   772           _referrer->klass()                  // No, just get its class
   773          : SystemDictionary::Class_klass();   // Yes, its class is Class
   774       _referrer_klass_tag = tag_for(tag_map, k);
   775     }
   776   }
   778   ~TwoOopCallbackWrapper() {
   779     if (!is_reference_to_self()){
   780       post_callback_tag_update(_referrer,
   781                                _referrer_hashmap,
   782                                _referrer_entry,
   783                                _referrer_obj_tag);
   784     }
   785   }
   787   // address of referrer tag
   788   // (for a self reference this will return the same thing as obj_tag_p())
   789   inline jlong* referrer_tag_p()        { return _referrer_tag_p; }
   791   // referrer's class tag
   792   inline jlong referrer_klass_tag()     { return _referrer_klass_tag; }
   793 };
   795 // tag an object
   796 //
   797 // This function is performance critical. If many threads attempt to tag objects
   798 // around the same time then it's possible that the Mutex associated with the
   799 // tag map will be a hot lock. Eliminating this lock will not eliminate the issue
   800 // because creating a JNI weak reference requires acquiring a global lock also.
   801 void JvmtiTagMap::set_tag(jobject object, jlong tag) {
   802   MutexLocker ml(lock());
   804   // resolve the object
   805   oop o = JNIHandles::resolve_non_null(object);
   807   // for Classes we tag the klassOop
   808   o = klassOop_if_java_lang_Class(o);
   810   // see if the object is already tagged
   811   JvmtiTagHashmap* hashmap = hashmap_for(o);
   812   JvmtiTagHashmapEntry* entry = hashmap->find(o);
   814   // if the object is not already tagged then we tag it
   815   if (entry == NULL) {
   816     if (tag != 0) {
   817       HandleMark hm;
   818       Handle h(o);
   819       jweak ref = JNIHandles::make_weak_global(h);
   821       // the object may have moved because make_weak_global may
    822       // have blocked - thus it is necessary to resolve the handle
   823       // and re-hash the object.
   824       o = h();
   825       entry = create_entry(ref, tag);
   826       hashmap_for(o)->add(o, entry);
   827     } else {
   828       // no-op
   829     }
   830   } else {
   831     // if the object is already tagged then we either update
   832     // the tag (if a new tag value has been provided)
   833     // or remove the object if the new tag value is 0.
   834     // Removing the object requires that we also delete the JNI
   835     // weak ref to the object.
   836     if (tag == 0) {
   837       jweak ref = entry->object();
   838       hashmap->remove(o);
   839       destroy_entry(entry);
   840       JNIHandles::destroy_weak_global(ref);
   841     } else {
   842       entry->set_tag(tag);
   843     }
   844   }
   845 }
   847 // get the tag for an object
   848 jlong JvmtiTagMap::get_tag(jobject object) {
   849   MutexLocker ml(lock());
   851   // resolve the object
   852   oop o = JNIHandles::resolve_non_null(object);
   854   // for Classes get the tag from the klassOop
   855   return tag_for(this, klassOop_if_java_lang_Class(o));
   856 }
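        // Illustrative agent-side sketch (not part of this file): set_tag() and get_tag()
        // above are reached through the JVMTI SetTag/GetTag entry points, e.g. from a
        // native agent holding a jvmtiEnv* named jvmti and a JNI reference obj:
        //
        //   jlong tag = 42;
        //   jvmti->SetTag(obj, tag);    // tags the object (creates entry + JNI weak ref)
        //   jvmti->GetTag(obj, &tag);   // reads the tag back
        //   jvmti->SetTag(obj, 0);      // untags it; entry and weak ref are released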
   859 // Helper class used to describe the static or instance fields of a class.
   860 // For each field it holds the field index (as defined by the JVMTI specification),
   861 // the field type, and the offset.
   863 class ClassFieldDescriptor: public CHeapObj {
   864  private:
   865   int _field_index;
   866   int _field_offset;
   867   char _field_type;
   868  public:
   869   ClassFieldDescriptor(int index, char type, int offset) :
   870     _field_index(index), _field_type(type), _field_offset(offset) {
   871   }
   872   int field_index()  const  { return _field_index; }
   873   char field_type()  const  { return _field_type; }
   874   int field_offset() const  { return _field_offset; }
   875 };
   877 class ClassFieldMap: public CHeapObj {
   878  private:
   879   enum {
   880     initial_field_count = 5
   881   };
   883   // list of field descriptors
   884   GrowableArray<ClassFieldDescriptor*>* _fields;
   886   // constructor
   887   ClassFieldMap();
   889   // add a field
   890   void add(int index, char type, int offset);
   892   // returns the field count for the given class
   893   static int compute_field_count(instanceKlassHandle ikh);
   895  public:
   896   ~ClassFieldMap();
   898   // access
   899   int field_count()                     { return _fields->length(); }
   900   ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }
   902   // functions to create maps of static or instance fields
   903   static ClassFieldMap* create_map_of_static_fields(klassOop k);
   904   static ClassFieldMap* create_map_of_instance_fields(oop obj);
   905 };
   907 ClassFieldMap::ClassFieldMap() {
   908   _fields = new (ResourceObj::C_HEAP) GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
   909 }
   911 ClassFieldMap::~ClassFieldMap() {
   912   for (int i=0; i<_fields->length(); i++) {
   913     delete _fields->at(i);
   914   }
   915   delete _fields;
   916 }
   918 void ClassFieldMap::add(int index, char type, int offset) {
   919   ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
   920   _fields->append(field);
   921 }
   923 // Returns a heap allocated ClassFieldMap to describe the static fields
   924 // of the given class.
   925 //
   926 ClassFieldMap* ClassFieldMap::create_map_of_static_fields(klassOop k) {
   927   HandleMark hm;
   928   instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), k);
   930   // create the field map
   931   ClassFieldMap* field_map = new ClassFieldMap();
   933   FilteredFieldStream f(ikh, false, false);
   934   int max_field_index = f.field_count()-1;
   936   int index = 0;
   937   for (FilteredFieldStream fld(ikh, true, true); !fld.eos(); fld.next(), index++) {
   938     // ignore instance fields
   939     if (!fld.access_flags().is_static()) {
   940       continue;
   941     }
   942     field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
   943   }
   944   return field_map;
   945 }
   947 // Returns a heap allocated ClassFieldMap to describe the instance fields
   948 // of the given class. All instance fields are included (this means public
   949 // and private fields declared in superclasses and superinterfaces too).
   950 //
   951 ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
   952   HandleMark hm;
   953   instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), obj->klass());
   955   // create the field map
   956   ClassFieldMap* field_map = new ClassFieldMap();
   958   FilteredFieldStream f(ikh, false, false);
   960   int max_field_index = f.field_count()-1;
   962   int index = 0;
   963   for (FilteredFieldStream fld(ikh, false, false); !fld.eos(); fld.next(), index++) {
   964     // ignore static fields
   965     if (fld.access_flags().is_static()) {
   966       continue;
   967     }
   968     field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
   969   }
   971   return field_map;
   972 }
    974 // Helper class used to cache a ClassFieldMap for the instance fields of
    975 // a class. A JvmtiCachedClassFieldMap can be cached by an instanceKlass during
    976 // heap iteration to avoid creating a field map for each object in the heap
   977 // (only need to create the map when the first instance of a class is encountered).
   978 //
   979 class JvmtiCachedClassFieldMap : public CHeapObj {
   980  private:
   981    enum {
   982      initial_class_count = 200
   983    };
   984   ClassFieldMap* _field_map;
   986   ClassFieldMap* field_map() const          { return _field_map; }
   988   JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
   989   ~JvmtiCachedClassFieldMap();
   991   static GrowableArray<instanceKlass*>* _class_list;
   992   static void add_to_class_list(instanceKlass* ik);
   994  public:
   995   // returns the field map for a given object (returning map cached
    996   // by the instanceKlass if possible)
   997   static ClassFieldMap* get_map_of_instance_fields(oop obj);
   999   // removes the field map from all instanceKlasses - should be
  1000   // called before VM operation completes
  1001   static void clear_cache();
   1003   // returns the number of ClassFieldMaps cached by instanceKlasses
  1004   static int cached_field_map_count();
  1005 };
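        // Note (not in the original source): the cache is populated lazily by
        // get_map_of_instance_fields() below and torn down by ClassFieldMapCacheMark /
        // clear_cache() before the heap-walking VM operation returns, so a cached map
        // never outlives a single iteration.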
  1007 GrowableArray<instanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
   1009 JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
   1010   _field_map = field_map;
   1011 }
   1013 JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
   1014   if (_field_map != NULL) {
   1015     delete _field_map;
   1016   }
   1017 }
   1019 // Marker class to ensure that the class field map cache is only used in a defined
  1020 // scope.
  1021 class ClassFieldMapCacheMark : public StackObj {
  1022  private:
  1023    static bool _is_active;
  1024  public:
  1025    ClassFieldMapCacheMark() {
  1026      assert(Thread::current()->is_VM_thread(), "must be VMThread");
  1027      assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
  1028      assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
   1029      _is_active = true;
   1030    }
   1031    ~ClassFieldMapCacheMark() {
   1032      JvmtiCachedClassFieldMap::clear_cache();
   1033      _is_active = false;
   1034    }
  1035    static bool is_active() { return _is_active; }
  1036 };
  1038 bool ClassFieldMapCacheMark::_is_active;
  1041 // record that the given instanceKlass is caching a field map
  1042 void JvmtiCachedClassFieldMap::add_to_class_list(instanceKlass* ik) {
  1043   if (_class_list == NULL) {
   1044     _class_list = new (ResourceObj::C_HEAP) GrowableArray<instanceKlass*>(initial_class_count, true);
   1045   }
   1046   _class_list->push(ik);
   1047 }
  1049 // returns the instance field map for the given object
  1050 // (returns field map cached by the instanceKlass if possible)
  1051 ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
  1052   assert(Thread::current()->is_VM_thread(), "must be VMThread");
  1053   assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");
  1055   klassOop k = obj->klass();
  1056   instanceKlass* ik = instanceKlass::cast(k);
  1058   // return cached map if possible
  1059   JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
  1060   if (cached_map != NULL) {
  1061     assert(cached_map->field_map() != NULL, "missing field list");
  1062     return cached_map->field_map();
  1063   } else {
  1064     ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
  1065     cached_map = new JvmtiCachedClassFieldMap(field_map);
  1066     ik->set_jvmti_cached_class_field_map(cached_map);
  1067     add_to_class_list(ik);
   1068     return field_map;
   1069   }
   1070 }
   1072 // remove the field maps cached by all instanceKlasses
  1073 void JvmtiCachedClassFieldMap::clear_cache() {
  1074   assert(Thread::current()->is_VM_thread(), "must be VMThread");
  1075   if (_class_list != NULL) {
  1076     for (int i = 0; i < _class_list->length(); i++) {
  1077       instanceKlass* ik = _class_list->at(i);
  1078       JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
  1079       assert(cached_map != NULL, "should not be NULL");
  1080       ik->set_jvmti_cached_class_field_map(NULL);
   1081       delete cached_map;  // deletes the encapsulated field map
   1082     }
   1083     delete _class_list;
   1084     _class_list = NULL;
   1085   }
   1086 }
   1088 // returns the number of ClassFieldMaps cached by instanceKlasses
   1089 int JvmtiCachedClassFieldMap::cached_field_map_count() {
   1090   return (_class_list == NULL) ? 0 : _class_list->length();
   1091 }
  1093 // helper function to indicate if an object is filtered by its tag or class tag
  1094 static inline bool is_filtered_by_heap_filter(jlong obj_tag,
  1095                                               jlong klass_tag,
  1096                                               int heap_filter) {
  1097   // apply the heap filter
  1098   if (obj_tag != 0) {
  1099     // filter out tagged objects
  1100     if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
  1101   } else {
  1102     // filter out untagged objects
   1103     if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
   1104   }
   1105   if (klass_tag != 0) {
   1106     // filter out objects with tagged classes
   1107     if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
   1108   } else {
   1109     // filter out objects with untagged classes.
   1110     if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
   1111   }
   1112   return false;
   1113 }
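        // Illustrative example (not in the original source): with heap_filter set to
        // JVMTI_HEAP_FILTER_UNTAGGED | JVMTI_HEAP_FILTER_CLASS_UNTAGGED, only objects
        // that are themselves tagged and whose class is also tagged pass the filter;
        // every other object returns true above and is skipped by the callers.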
  1115 // helper function to indicate if an object is filtered by a klass filter
  1116 static inline bool is_filtered_by_klass_filter(oop obj, KlassHandle klass_filter) {
  1117   if (!klass_filter.is_null()) {
  1118     if (obj->klass() != klass_filter()) {
   1119       return true;
   1120     }
   1121   }
   1122   return false;
   1123 }
   1125 // helper function to tell if a field is a primitive field or not
   1126 static inline bool is_primitive_field_type(char type) {
   1127   return (type != 'L' && type != '[');
   1128 }
  1130 // helper function to copy the value from location addr to jvalue.
  1131 static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
  1132   switch (value_type) {
  1133     case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
  1134     case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
  1135     case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
  1136     case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
  1137     case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
  1138     case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
  1139     case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
  1140     case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
   1141     default: ShouldNotReachHere();
   1142   }
   1143 }
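        // Note (not in the original source): the jvmtiPrimitiveType constants are defined
        // as the corresponding JVM type descriptor characters (JVMTI_PRIMITIVE_TYPE_INT
        // == 'I', JVMTI_PRIMITIVE_TYPE_LONG == 'J', and so on), which is why the callers
        // below can cast a field signature character directly to a jvmtiPrimitiveType.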
  1145 // helper function to invoke string primitive value callback
  1146 // returns visit control flags
  1147 static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
  1148                                          CallbackWrapper* wrapper,
  1149                                          oop str,
  1150                                          void* user_data)
   1151 {
   1152   assert(str->klass() == SystemDictionary::String_klass(), "not a string");
  1154   // get the string value and length
  1155   // (string value may be offset from the base)
  1156   int s_len = java_lang_String::length(str);
  1157   typeArrayOop s_value = java_lang_String::value(str);
  1158   int s_offset = java_lang_String::offset(str);
  1159   jchar* value;
  1160   if (s_len > 0) {
  1161     value = s_value->char_at_addr(s_offset);
  1162   } else {
   1163     value = (jchar*) s_value->base(T_CHAR);
   1164   }
  1166   // invoke the callback
  1167   return (*cb)(wrapper->klass_tag(),
  1168                wrapper->obj_size(),
  1169                wrapper->obj_tag_p(),
  1170                value,
  1171                (jint)s_len,
  1172                user_data);
   1173 }
   1175 // helper function to invoke array primitive value callback
  1176 // returns visit control flags
  1177 static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
  1178                                                   CallbackWrapper* wrapper,
  1179                                                   oop obj,
  1180                                                   void* user_data)
   1181 {
   1182   assert(obj->is_typeArray(), "not a primitive array");
  1184   // get base address of first element
  1185   typeArrayOop array = typeArrayOop(obj);
  1186   BasicType type = typeArrayKlass::cast(array->klass())->element_type();
  1187   void* elements = array->base(type);
  1189   // jvmtiPrimitiveType is defined so this mapping is always correct
  1190   jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);
  1192   return (*cb)(wrapper->klass_tag(),
  1193                wrapper->obj_size(),
  1194                wrapper->obj_tag_p(),
  1195                (jint)array->length(),
  1196                elem_type,
  1197                elements,
  1198                user_data);
   1199 }
   1201 // helper function to invoke the primitive field callback for all static fields
  1202 // of a given class
  1203 static jint invoke_primitive_field_callback_for_static_fields
  1204   (CallbackWrapper* wrapper,
  1205    oop obj,
  1206    jvmtiPrimitiveFieldCallback cb,
  1207    void* user_data)
   1208 {
   1209   // for static fields only the index will be set
  1210   static jvmtiHeapReferenceInfo reference_info = { 0 };
  1212   assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
  1213   if (java_lang_Class::is_primitive(obj)) {
   1214     return 0;
   1215   }
  1216   klassOop k = java_lang_Class::as_klassOop(obj);
  1217   Klass* klass = k->klass_part();
  1219   // ignore classes for object and type arrays
  1220   if (!klass->oop_is_instance()) {
   1221     return 0;
   1222   }
  1224   // ignore classes which aren't linked yet
  1225   instanceKlass* ik = instanceKlass::cast(k);
  1226   if (!ik->is_linked()) {
   1227     return 0;
   1228   }
  1230   // get the field map
  1231   ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(k);
  1233   // invoke the callback for each static primitive field
  1234   for (int i=0; i<field_map->field_count(); i++) {
  1235     ClassFieldDescriptor* field = field_map->field_at(i);
  1237     // ignore non-primitive fields
  1238     char type = field->field_type();
  1239     if (!is_primitive_field_type(type)) {
   1240       continue;
   1241     }
  1242     // one-to-one mapping
  1243     jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
  1245     // get offset and field value
  1246     int offset = field->field_offset();
  1247     address addr = (address)k + offset;
  1248     jvalue value;
  1249     copy_to_jvalue(&value, addr, value_type);
  1251     // field index
  1252     reference_info.field.index = field->field_index();
  1254     // invoke the callback
  1255     jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
  1256                      &reference_info,
  1257                      wrapper->klass_tag(),
  1258                      wrapper->obj_tag_p(),
  1259                      value,
  1260                      value_type,
  1261                      user_data);
  1262     if (res & JVMTI_VISIT_ABORT) {
  1263       delete field_map;
   1264       return res;
   1265     }
   1266   }
   1268   delete field_map;
   1269   return 0;
   1270 }
  1272 // helper function to invoke the primitive field callback for all instance fields
  1273 // of a given object
  1274 static jint invoke_primitive_field_callback_for_instance_fields(
  1275   CallbackWrapper* wrapper,
  1276   oop obj,
  1277   jvmtiPrimitiveFieldCallback cb,
  1278   void* user_data)
   1279 {
   1280   // for instance fields only the index will be set
  1281   static jvmtiHeapReferenceInfo reference_info = { 0 };
  1283   // get the map of the instance fields
  1284   ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);
  1286   // invoke the callback for each instance primitive field
  1287   for (int i=0; i<fields->field_count(); i++) {
  1288     ClassFieldDescriptor* field = fields->field_at(i);
  1290     // ignore non-primitive fields
  1291     char type = field->field_type();
  1292     if (!is_primitive_field_type(type)) {
   1293       continue;
   1294     }
  1295     // one-to-one mapping
  1296     jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
  1298     // get offset and field value
  1299     int offset = field->field_offset();
  1300     address addr = (address)obj + offset;
  1301     jvalue value;
  1302     copy_to_jvalue(&value, addr, value_type);
  1304     // field index
  1305     reference_info.field.index = field->field_index();
  1307     // invoke the callback
  1308     jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
  1309                      &reference_info,
  1310                      wrapper->klass_tag(),
  1311                      wrapper->obj_tag_p(),
  1312                      value,
  1313                      value_type,
  1314                      user_data);
  1315     if (res & JVMTI_VISIT_ABORT) {
   1316       return res;
   1317     }
   1318   }
   1319   return 0;
   1320 }
  1323 // VM operation to iterate over all objects in the heap (both reachable
  1324 // and unreachable)
  1325 class VM_HeapIterateOperation: public VM_Operation {
  1326  private:
  1327   ObjectClosure* _blk;
  1328  public:
  1329   VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }
  1331   VMOp_Type type() const { return VMOp_HeapIterateOperation; }
  1332   void doit() {
   1333     // allows class field maps to be cached during iteration
  1334     ClassFieldMapCacheMark cm;
  1336     // make sure that heap is parsable (fills TLABs with filler objects)
  1337     Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
  1339     // Verify heap before iteration - if the heap gets corrupted then
  1340     // JVMTI's IterateOverHeap will crash.
  1341     if (VerifyBeforeIteration) {
   1342       Universe::verify();
   1343     }
  1345     // do the iteration
  1346     // If this operation encounters a bad object when using CMS,
  1347     // consider using safe_object_iterate() which avoids perm gen
  1348     // objects that may contain bad references.
  1349     Universe::heap()->object_iterate(_blk);
  1351     // when sharing is enabled we must iterate over the shared spaces
  1352     if (UseSharedSpaces) {
  1353       GenCollectedHeap* gch = GenCollectedHeap::heap();
  1354       CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
  1355       gen->ro_space()->object_iterate(_blk);
   1356       gen->rw_space()->object_iterate(_blk);
   1357     }
   1358   }
  1360 };
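        // Illustrative sketch (not in the original source) of how this VM operation is
        // typically driven; the closure type stands in for the ones defined below:
        //
        //   IterateOverHeapObjectClosure blk(this, klass, object_filter,
        //                                    heap_object_callback, user_data);
        //   VM_HeapIterateOperation op(&blk);
        //   VMThread::execute(&op);   // runs doit() in the VM thread at a safepoint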
  1363 // An ObjectClosure used to support the deprecated IterateOverHeap and
  1364 // IterateOverInstancesOfClass functions
  1365 class IterateOverHeapObjectClosure: public ObjectClosure {
  1366  private:
  1367   JvmtiTagMap* _tag_map;
  1368   KlassHandle _klass;
  1369   jvmtiHeapObjectFilter _object_filter;
  1370   jvmtiHeapObjectCallback _heap_object_callback;
  1371   const void* _user_data;
  1373   // accessors
  1374   JvmtiTagMap* tag_map() const                    { return _tag_map; }
  1375   jvmtiHeapObjectFilter object_filter() const     { return _object_filter; }
  1376   jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
  1377   KlassHandle klass() const                       { return _klass; }
  1378   const void* user_data() const                   { return _user_data; }
  1380   // indicates if iteration has been aborted
  1381   bool _iteration_aborted;
  1382   bool is_iteration_aborted() const               { return _iteration_aborted; }
  1383   void set_iteration_aborted(bool aborted)        { _iteration_aborted = aborted; }
  1385  public:
  1386   IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
  1387                                KlassHandle klass,
  1388                                jvmtiHeapObjectFilter object_filter,
  1389                                jvmtiHeapObjectCallback heap_object_callback,
  1390                                const void* user_data) :
  1391     _tag_map(tag_map),
  1392     _klass(klass),
  1393     _object_filter(object_filter),
  1394     _heap_object_callback(heap_object_callback),
  1395     _user_data(user_data),
   1396     _iteration_aborted(false)
   1397   {
   1398   }
  1400   void do_object(oop o);
  1401 };
  1403 // invoked for each object in the heap
  1404 void IterateOverHeapObjectClosure::do_object(oop o) {
  1405   // check if iteration has been halted
  1406   if (is_iteration_aborted()) return;
  1408   // ignore any objects that aren't visible to profiler
  1409   if (!ServiceUtil::visible_oop(o)) return;
  1411   // instanceof check when filtering by klass
  1412   if (!klass().is_null() && !o->is_a(klass()())) {
   1413     return;
   1414   }
   1415   // prepare for the callback
  1416   CallbackWrapper wrapper(tag_map(), o);
  1418   // if the object is tagged and we're only interested in untagged objects
   1419   // then don't invoke the callback. Similarly, if the object is untagged
  1420   // and we're only interested in tagged objects we skip the callback.
  1421   if (wrapper.obj_tag() != 0) {
  1422     if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
  1423   } else {
  1424     if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
  1427   // invoke the agent's callback
  1428   jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
  1429                                                        wrapper.obj_size(),
  1430                                                        wrapper.obj_tag_p(),
  1431                                                        (void*)user_data());
  1432   if (control == JVMTI_ITERATION_ABORT) {
   1433     set_iteration_aborted(true);
   1434   }
   1435 }
  1437 // An ObjectClosure used to support the IterateThroughHeap function
  1438 class IterateThroughHeapObjectClosure: public ObjectClosure {
  1439  private:
  1440   JvmtiTagMap* _tag_map;
  1441   KlassHandle _klass;
  1442   int _heap_filter;
  1443   const jvmtiHeapCallbacks* _callbacks;
  1444   const void* _user_data;
  1446   // accessor functions
  1447   JvmtiTagMap* tag_map() const                     { return _tag_map; }
  1448   int heap_filter() const                          { return _heap_filter; }
  1449   const jvmtiHeapCallbacks* callbacks() const      { return _callbacks; }
  1450   KlassHandle klass() const                        { return _klass; }
  1451   const void* user_data() const                    { return _user_data; }
  1453   // indicates if the iteration has been aborted
  1454   bool _iteration_aborted;
  1455   bool is_iteration_aborted() const                { return _iteration_aborted; }
  1457   // used to check the visit control flags. If the abort flag is set
  1458   // then we set the iteration aborted flag so that the iteration completes
  1459   // without processing any further objects
  1460   bool check_flags_for_abort(jint flags) {
  1461     bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
  1462     if (is_abort) {
   1463       _iteration_aborted = true;
   1464     }
   1465     return is_abort;
   1466   }
  1468  public:
  1469   IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
  1470                                   KlassHandle klass,
  1471                                   int heap_filter,
  1472                                   const jvmtiHeapCallbacks* heap_callbacks,
  1473                                   const void* user_data) :
  1474     _tag_map(tag_map),
  1475     _klass(klass),
  1476     _heap_filter(heap_filter),
  1477     _callbacks(heap_callbacks),
  1478     _user_data(user_data),
   1479     _iteration_aborted(false)
   1480   {
   1481   }
  1483   void do_object(oop o);
  1484 };
  1486 // invoked for each object in the heap
  1487 void IterateThroughHeapObjectClosure::do_object(oop obj) {
  1488   // check if iteration has been halted
  1489   if (is_iteration_aborted()) return;
  1491   // ignore any objects that aren't visible to profiler
  1492   if (!ServiceUtil::visible_oop(obj)) return;
  1494   // apply class filter
  1495   if (is_filtered_by_klass_filter(obj, klass())) return;
  1497   // prepare for callback
  1498   CallbackWrapper wrapper(tag_map(), obj);
  1500   // check if filtered by the heap filter
  1501   if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
  1502     return;
  1505   // for arrays we need the length, otherwise -1
  1506   bool is_array = obj->is_array();
  1507   int len = is_array ? arrayOop(obj)->length() : -1;
  1509   // invoke the object callback (if callback is provided)
  1510   if (callbacks()->heap_iteration_callback != NULL) {
  1511     jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
  1512     jint res = (*cb)(wrapper.klass_tag(),
  1513                      wrapper.obj_size(),
  1514                      wrapper.obj_tag_p(),
  1515                      (jint)len,
  1516                      (void*)user_data());
  1517     if (check_flags_for_abort(res)) return;
  1520   // for objects and classes we report primitive fields if callback provided
  1521   if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
  1522     jint res;
  1523     jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
  1524     if (obj->klass() == SystemDictionary::Class_klass()) {
  1525       res = invoke_primitive_field_callback_for_static_fields(&wrapper,
  1526                                                                     obj,
  1527                                                                     cb,
  1528                                                                     (void*)user_data());
  1529     } else {
  1530       res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
  1531                                                                       obj,
  1532                                                                       cb,
  1533                                                                       (void*)user_data());
  1535     if (check_flags_for_abort(res)) return;
  1538   // string callback
  1539   if (!is_array &&
  1540       callbacks()->string_primitive_value_callback != NULL &&
  1541       obj->klass() == SystemDictionary::String_klass()) {
  1542     jint res = invoke_string_value_callback(
  1543                 callbacks()->string_primitive_value_callback,
  1544                 &wrapper,
  1545                 obj,
  1546                 (void*)user_data() );
  1547     if (check_flags_for_abort(res)) return;
  1550   // array callback
  1551   if (is_array &&
  1552       callbacks()->array_primitive_value_callback != NULL &&
  1553       obj->is_typeArray()) {
  1554     jint res = invoke_array_primitive_value_callback(
  1555                callbacks()->array_primitive_value_callback,
  1556                &wrapper,
  1557                obj,
  1558                (void*)user_data() );
  1559     if (check_flags_for_abort(res)) return;
  1561 };
  1564 // Deprecated function to iterate over all objects in the heap
  1565 void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
  1566                                     KlassHandle klass,
  1567                                     jvmtiHeapObjectCallback heap_object_callback,
  1568                                     const void* user_data)
  1570   MutexLocker ml(Heap_lock);
  1571   IterateOverHeapObjectClosure blk(this,
  1572                                    klass,
  1573                                    object_filter,
  1574                                    heap_object_callback,
  1575                                    user_data);
  1576   VM_HeapIterateOperation op(&blk);
  1577   VMThread::execute(&op);
  1581 // Iterates over all objects in the heap
  1582 void JvmtiTagMap::iterate_through_heap(jint heap_filter,
  1583                                        KlassHandle klass,
  1584                                        const jvmtiHeapCallbacks* callbacks,
  1585                                        const void* user_data)
  1587   MutexLocker ml(Heap_lock);
  1588   IterateThroughHeapObjectClosure blk(this,
  1589                                       klass,
  1590                                       heap_filter,
  1591                                       callbacks,
  1592                                       user_data);
  1593   VM_HeapIterateOperation op(&blk);
  1594   VMThread::execute(&op);
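// ---------------------------------------------------------------------------
// Illustrative sketch (not part of this file): how an agent typically drives
// the code path above. A JVM TI IterateThroughHeap call ends up in
// iterate_through_heap(); the deprecated IterateOverHeap takes the analogous
// path through iterate_over_heap(). The sketch assumes a valid jvmtiEnv*
// named jvmti held by the agent; the callback name and the counting logic are
// invented for the example, only the JVM TI entry points are real.
//
//   static jint JNICALL count_cb(jlong class_tag, jlong size, jlong* tag_ptr,
//                                jint length, void* user_data) {
//     *(jlong*)user_data += 1;          // count every object visited
//     return 0;                         // no JVMTI_VISIT_ABORT, keep going
//   }
//
//   jlong count = 0;
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_iteration_callback = &count_cb;
//   jvmti->IterateThroughHeap(0 /* no heap filter */, NULL /* all classes */,
//                             &callbacks, &count);
// ---------------------------------------------------------------------------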
  1597 // support class for get_objects_with_tags
  1599 class TagObjectCollector : public JvmtiTagHashmapEntryClosure {
  1600  private:
  1601   JvmtiEnv* _env;
  1602   jlong* _tags;
  1603   jint _tag_count;
  1605   GrowableArray<jobject>* _object_results;  // collected objects (JNI weak refs)
  1606   GrowableArray<uint64_t>* _tag_results;    // collected tags
  1608  public:
  1609   TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) {
  1610     _env = env;
  1611     _tags = (jlong*)tags;
  1612     _tag_count = tag_count;
  1613     _object_results = new (ResourceObj::C_HEAP) GrowableArray<jobject>(1,true);
  1614     _tag_results = new (ResourceObj::C_HEAP) GrowableArray<uint64_t>(1,true);
  1617   ~TagObjectCollector() {
  1618     delete _object_results;
  1619     delete _tag_results;
  1622   // for each tagged object check if the tag value matches
  1623   // - if it matches then we create a JNI local reference to the object
  1624   // and record the reference and tag value.
  1625   //
  1626   void do_entry(JvmtiTagHashmapEntry* entry) {
  1627     for (int i=0; i<_tag_count; i++) {
  1628       if (_tags[i] == entry->tag()) {
  1629         oop o = JNIHandles::resolve(entry->object());
  1630         assert(o != NULL && o != JNIHandles::deleted_handle(), "sanity check");
  1632         // for a tagged class the entry holds the klassOop; return its java mirror
  1633         if (o->is_klass()) {
  1634           klassOop k = (klassOop)o;
  1635           o = Klass::cast(k)->java_mirror();
  1638         jobject ref = JNIHandles::make_local(JavaThread::current(), o);
  1639         _object_results->append(ref);
  1640         _tag_results->append((uint64_t)entry->tag());
  1645   // return the results from the collection
  1646   //
  1647   jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
  1648     jvmtiError error;
  1649     int count = _object_results->length();
  1650     assert(count >= 0, "sanity check");
  1652     // if object_result_ptr is not NULL then allocate the result and copy
  1653     // in the object references.
  1654     if (object_result_ptr != NULL) {
  1655       error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
  1656       if (error != JVMTI_ERROR_NONE) {
  1657         return error;
  1659       for (int i=0; i<count; i++) {
  1660         (*object_result_ptr)[i] = _object_results->at(i);
  1664     // if tag_result_ptr is not NULL then allocate the result and copy
  1665     // in the tag values.
  1666     if (tag_result_ptr != NULL) {
  1667       error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
  1668       if (error != JVMTI_ERROR_NONE) {
  1669         if (object_result_ptr != NULL) {
  1670           _env->Deallocate((unsigned char*)*object_result_ptr);
  1672         return error;
  1674       for (int i=0; i<count; i++) {
  1675         (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
  1679     *count_ptr = count;
  1680     return JVMTI_ERROR_NONE;
  1682 };
  1684 // return the list of objects with the specified tags
  1685 jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
  1686   jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
  1688   TagObjectCollector collector(env(), tags, count);
  1690     // iterate over all tagged objects
  1691     MutexLocker ml(lock());
  1692     entry_iterate(&collector);
  1694   return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
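// Illustrative sketch (not part of this file): the agent-side calls that end
// up in get_objects_with_tags() above. It assumes a valid jvmtiEnv* named
// jvmti; the tag value and variable names are invented for the example, while
// SetTag, GetObjectsWithTags and Deallocate are the real JVM TI entry points.
//
//   jvmti->SetTag(some_object, 42);
//   ...
//   jlong tag = 42;
//   jint found;
//   jobject* objects;
//   jlong* tags;
//   jvmti->GetObjectsWithTags(1, &tag, &found, &objects, &tags);
//   ... use objects[0 .. found-1], then release the result arrays ...
//   jvmti->Deallocate((unsigned char*)objects);
//   jvmti->Deallocate((unsigned char*)tags);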
  1698 // ObjectMarker is used to support the marking objects when walking the
  1699 // heap.
  1700 //
  1701 // This implementation uses the existing mark bits in an object for
  1702 // marking. Objects that are marked must later have their headers restored.
  1703 // As most objects are unlocked and don't have their identity hash computed
  1704 // we don't have to save their headers. Instead we save the headers that
  1705 // are "interesting". Later when the headers are restored this implementation
  1706 // restores all headers to their initial value and then restores the few
  1707 // objects that had interesting headers.
  1708 //
  1709 // Future work: This implementation currently uses growable arrays to save
  1710 // the oop and header of interesting objects. As an optimization we could
  1711 // use the same technique as the GC and make use of the unused area
  1712 // between top() and end().
  1713 //
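// A minimal sketch of the marking protocol, using only the names declared
// below (the driving loop is a simplification of the heap walk later in this
// file):
//
//   ObjectMarker::init();                    // prepare heap, save headers lazily
//   ...
//   if (!ObjectMarker::visited(o)) {
//     ObjectMarker::mark(o);                 // preserves "interesting" headers
//     ... report o and push its references ...
//   }
//   ...
//   ObjectMarker::done();                    // restore all object headers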
  1715 // An ObjectClosure used to restore the mark bits of an object
  1716 class RestoreMarksClosure : public ObjectClosure {
  1717  public:
  1718   void do_object(oop o) {
  1719     if (o != NULL) {
  1720       markOop mark = o->mark();
  1721       if (mark->is_marked()) {
  1722         o->init_mark();
  1726 };
  1728 // ObjectMarker provides the mark and visited functions
  1729 class ObjectMarker : AllStatic {
  1730  private:
  1731   // saved headers
  1732   static GrowableArray<oop>* _saved_oop_stack;
  1733   static GrowableArray<markOop>* _saved_mark_stack;
  1735  public:
  1736   static void init();                       // initialize
  1737   static void done();                       // clean-up
  1739   static inline void mark(oop o);           // mark an object
  1740   static inline bool visited(oop o);        // check if object has been visited
  1741 };
  1743 GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
  1744 GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
  1746 // initialize ObjectMarker - prepares for object marking
  1747 void ObjectMarker::init() {
  1748   assert(Thread::current()->is_VM_thread(), "must be VMThread");
  1750   // prepare heap for iteration
  1751   Universe::heap()->ensure_parsability(false);  // no need to retire TLABs
  1753   // create stacks for interesting headers
  1754   _saved_mark_stack = new (ResourceObj::C_HEAP) GrowableArray<markOop>(4000, true);
  1755   _saved_oop_stack = new (ResourceObj::C_HEAP) GrowableArray<oop>(4000, true);
  1757   if (UseBiasedLocking) {
  1758     BiasedLocking::preserve_marks();
  1762 // Object marking is done so restore object headers
  1763 void ObjectMarker::done() {
  1764   // iterate over all objects and restore the mark bits to
  1765   // their initial value
  1766   RestoreMarksClosure blk;
  1767   Universe::heap()->object_iterate(&blk);
  1769   // When sharing is enabled we need to restore the headers of the objects
  1770   // in the readwrite space too.
  1771   if (UseSharedSpaces) {
  1772     GenCollectedHeap* gch = GenCollectedHeap::heap();
  1773     CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
  1774     gen->rw_space()->object_iterate(&blk);
  1777   // now restore the interesting headers
  1778   for (int i = 0; i < _saved_oop_stack->length(); i++) {
  1779     oop o = _saved_oop_stack->at(i);
  1780     markOop mark = _saved_mark_stack->at(i);
  1781     o->set_mark(mark);
  1784   if (UseBiasedLocking) {
  1785     BiasedLocking::restore_marks();
  1788   // free the stacks
  1789   delete _saved_oop_stack;
  1790   delete _saved_mark_stack;
  1793 // mark an object
  1794 inline void ObjectMarker::mark(oop o) {
  1795   assert(Universe::heap()->is_in(o), "sanity check");
  1796   assert(!o->mark()->is_marked(), "should only mark an object once");
  1798   // object's mark word
  1799   markOop mark = o->mark();
  1801   if (mark->must_be_preserved(o)) {
  1802     _saved_mark_stack->push(mark);
  1803     _saved_oop_stack->push(o);
  1806   // mark the object
  1807   o->set_mark(markOopDesc::prototype()->set_marked());
  1810 // return true if object is marked
  1811 inline bool ObjectMarker::visited(oop o) {
  1812   return o->mark()->is_marked();
  1815 // Stack allocated class to help ensure that ObjectMarker is used
  1816 // correctly. Constructor initializes ObjectMarker, destructor calls
  1817 // ObjectMarker's done() function to restore object headers.
  1818 class ObjectMarkerController : public StackObj {
  1819  public:
  1820   ObjectMarkerController() {
  1821     ObjectMarker::init();
  1823   ~ObjectMarkerController() {
  1824     ObjectMarker::done();
  1826 };
  1829 // helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
  1830 // (not performance critical as only used for roots)
  1831 static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
  1832   switch (kind) {
  1833     case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
  1834     case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
  1835     case JVMTI_HEAP_REFERENCE_MONITOR:      return JVMTI_HEAP_ROOT_MONITOR;
  1836     case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
  1837     case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
  1838     case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
  1839     case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
  1840     default: ShouldNotReachHere();          return JVMTI_HEAP_ROOT_OTHER;
  1844 // Base class for all heap walk contexts. The base class maintains a flag
  1845 // to indicate if the context is valid or not.
  1846 class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
  1847  private:
  1848   bool _valid;
  1849  public:
  1850   HeapWalkContext(bool valid)                   { _valid = valid; }
  1851   void invalidate()                             { _valid = false; }
  1852   bool is_valid() const                         { return _valid; }
  1853 };
  1855 // A basic heap walk context for the deprecated heap walking functions.
  1856 // The context for a basic heap walk consists of the callbacks and fields used by
  1857 // the referrer caching scheme.
  1858 class BasicHeapWalkContext: public HeapWalkContext {
  1859  private:
  1860   jvmtiHeapRootCallback _heap_root_callback;
  1861   jvmtiStackReferenceCallback _stack_ref_callback;
  1862   jvmtiObjectReferenceCallback _object_ref_callback;
  1864   // used for caching
  1865   oop _last_referrer;
  1866   jlong _last_referrer_tag;
  1868  public:
  1869   BasicHeapWalkContext() : HeapWalkContext(false) { }
  1871   BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
  1872                        jvmtiStackReferenceCallback stack_ref_callback,
  1873                        jvmtiObjectReferenceCallback object_ref_callback) :
  1874     HeapWalkContext(true),
  1875     _heap_root_callback(heap_root_callback),
  1876     _stack_ref_callback(stack_ref_callback),
  1877     _object_ref_callback(object_ref_callback),
  1878     _last_referrer(NULL),
  1879     _last_referrer_tag(0) {
  1882   // accessors
  1883   jvmtiHeapRootCallback heap_root_callback() const         { return _heap_root_callback; }
  1884   jvmtiStackReferenceCallback stack_ref_callback() const   { return _stack_ref_callback; }
  1885   jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback;  }
  1887   oop last_referrer() const               { return _last_referrer; }
  1888   void set_last_referrer(oop referrer)    { _last_referrer = referrer; }
  1889   jlong last_referrer_tag() const         { return _last_referrer_tag; }
  1890   void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
  1891 };
  1893 // The advanced heap walk context for the FollowReferences functions.
  1894 // The context consists of the callbacks and the fields used for filtering.
  1895 class AdvancedHeapWalkContext: public HeapWalkContext {
  1896  private:
  1897   jint _heap_filter;
  1898   KlassHandle _klass_filter;
  1899   const jvmtiHeapCallbacks* _heap_callbacks;
  1901  public:
  1902   AdvancedHeapWalkContext() : HeapWalkContext(false) { }
  1904   AdvancedHeapWalkContext(jint heap_filter,
  1905                            KlassHandle klass_filter,
  1906                            const jvmtiHeapCallbacks* heap_callbacks) :
  1907     HeapWalkContext(true),
  1908     _heap_filter(heap_filter),
  1909     _klass_filter(klass_filter),
  1910     _heap_callbacks(heap_callbacks) {
  1913   // accessors
  1914   jint heap_filter() const         { return _heap_filter; }
  1915   KlassHandle klass_filter() const { return _klass_filter; }
  1917   const jvmtiHeapReferenceCallback heap_reference_callback() const {
  1918     return _heap_callbacks->heap_reference_callback;
  1919   };
  1920   const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
  1921     return _heap_callbacks->primitive_field_callback;
  1923   const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
  1924     return _heap_callbacks->array_primitive_value_callback;
  1926   const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
  1927     return _heap_callbacks->string_primitive_value_callback;
  1929 };
  1931 // The CallbackInvoker is a class with static functions that the heap walk can call
  1932 // into to invoke callbacks. It works in one of two modes. The "basic" mode is
  1933 // used for the deprecated IterateOverReachableObjects functions. The "advanced"
  1934 // mode is for the newer FollowReferences function which supports a lot of
  1935 // additional callbacks.
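// Sketch of how the two modes are selected (the contexts and initializers are
// defined in this file; the callback and variable names are placeholders and
// the surrounding setup is simplified):
//
//   // deprecated IterateOverReachableObjects et al:
//   BasicHeapWalkContext basic_ctx(heap_root_cb, stack_ref_cb, object_ref_cb);
//   CallbackInvoker::initialize_for_basic_heap_walk(tag_map, visit_stack,
//                                                   user_data, basic_ctx);
//
//   // FollowReferences:
//   AdvancedHeapWalkContext adv_ctx(heap_filter, klass_filter, callbacks);
//   CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, visit_stack,
//                                                      user_data, adv_ctx);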
  1936 class CallbackInvoker : AllStatic {
  1937  private:
  1938   // heap walk styles
  1939   enum { basic, advanced };
  1940   static int _heap_walk_type;
  1941   static bool is_basic_heap_walk()           { return _heap_walk_type == basic; }
  1942   static bool is_advanced_heap_walk()        { return _heap_walk_type == advanced; }
  1944   // context for basic style heap walk
  1945   static BasicHeapWalkContext _basic_context;
  1946   static BasicHeapWalkContext* basic_context() {
  1947     assert(_basic_context.is_valid(), "invalid");
  1948     return &_basic_context;
  1951   // context for advanced style heap walk
  1952   static AdvancedHeapWalkContext _advanced_context;
  1953   static AdvancedHeapWalkContext* advanced_context() {
  1954     assert(_advanced_context.is_valid(), "invalid");
  1955     return &_advanced_context;
  1958   // context needed for all heap walks
  1959   static JvmtiTagMap* _tag_map;
  1960   static const void* _user_data;
  1961   static GrowableArray<oop>* _visit_stack;
  1963   // accessors
  1964   static JvmtiTagMap* tag_map()                        { return _tag_map; }
  1965   static const void* user_data()                       { return _user_data; }
  1966   static GrowableArray<oop>* visit_stack()             { return _visit_stack; }
  1968   // if the object hasn't been visited then push it onto the visit stack
  1969   // so that it will be visited later
  1970   static inline bool check_for_visit(oop obj) {
  1971     if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
  1972     return true;
  1975   // invoke basic style callbacks
  1976   static inline bool invoke_basic_heap_root_callback
  1977     (jvmtiHeapRootKind root_kind, oop obj);
  1978   static inline bool invoke_basic_stack_ref_callback
  1979     (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
  1980      int slot, oop obj);
  1981   static inline bool invoke_basic_object_reference_callback
  1982     (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);
  1984   // invoke advanced style callbacks
  1985   static inline bool invoke_advanced_heap_root_callback
  1986     (jvmtiHeapReferenceKind ref_kind, oop obj);
  1987   static inline bool invoke_advanced_stack_ref_callback
  1988     (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
  1989      jmethodID method, jlocation bci, jint slot, oop obj);
  1990   static inline bool invoke_advanced_object_reference_callback
  1991     (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);
  1993   // used to report the value of primitive fields
  1994   static inline bool report_primitive_field
  1995     (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);
  1997  public:
  1998   // initialize for basic mode
  1999   static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
  2000                                              GrowableArray<oop>* visit_stack,
  2001                                              const void* user_data,
  2002                                              BasicHeapWalkContext context);
  2004   // initialize for advanced mode
  2005   static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
  2006                                                 GrowableArray<oop>* visit_stack,
  2007                                                 const void* user_data,
  2008                                                 AdvancedHeapWalkContext context);
  2010    // functions to report roots
  2011   static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
  2012   static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
  2013     jmethodID m, oop o);
  2014   static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
  2015     jmethodID method, jlocation bci, jint slot, oop o);
  2017   // functions to report references
  2018   static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
  2019   static inline bool report_class_reference(oop referrer, oop referree);
  2020   static inline bool report_class_loader_reference(oop referrer, oop referree);
  2021   static inline bool report_signers_reference(oop referrer, oop referree);
  2022   static inline bool report_protection_domain_reference(oop referrer, oop referree);
  2023   static inline bool report_superclass_reference(oop referrer, oop referree);
  2024   static inline bool report_interface_reference(oop referrer, oop referree);
  2025   static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
  2026   static inline bool report_field_reference(oop referrer, oop referree, jint slot);
  2027   static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
  2028   static inline bool report_primitive_array_values(oop array);
  2029   static inline bool report_string_value(oop str);
  2030   static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
  2031   static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
  2032 };
  2034 // statics
  2035 int CallbackInvoker::_heap_walk_type;
  2036 BasicHeapWalkContext CallbackInvoker::_basic_context;
  2037 AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
  2038 JvmtiTagMap* CallbackInvoker::_tag_map;
  2039 const void* CallbackInvoker::_user_data;
  2040 GrowableArray<oop>* CallbackInvoker::_visit_stack;
  2042 // initialize for basic heap walk (IterateOverReachableObjects et al)
  2043 void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
  2044                                                      GrowableArray<oop>* visit_stack,
  2045                                                      const void* user_data,
  2046                                                      BasicHeapWalkContext context) {
  2047   _tag_map = tag_map;
  2048   _visit_stack = visit_stack;
  2049   _user_data = user_data;
  2050   _basic_context = context;
  2051   _advanced_context.invalidate();       // will trigger assertion if used
  2052   _heap_walk_type = basic;
  2055 // initialize for advanced heap walk (FollowReferences)
  2056 void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
  2057                                                         GrowableArray<oop>* visit_stack,
  2058                                                         const void* user_data,
  2059                                                         AdvancedHeapWalkContext context) {
  2060   _tag_map = tag_map;
  2061   _visit_stack = visit_stack;
  2062   _user_data = user_data;
  2063   _advanced_context = context;
  2064   _basic_context.invalidate();      // will trigger assertion if used
  2065   _heap_walk_type = advanced;
  2069 // invoke basic style heap root callback
  2070 inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
  2071   assert(ServiceUtil::visible_oop(obj), "checking");
  2073   // check if heap roots should be reported
  2074   jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
  2075   if (cb == NULL) {
  2076     return check_for_visit(obj);
  2079   CallbackWrapper wrapper(tag_map(), obj);
  2080   jvmtiIterationControl control = (*cb)(root_kind,
  2081                                         wrapper.klass_tag(),
  2082                                         wrapper.obj_size(),
  2083                                         wrapper.obj_tag_p(),
  2084                                         (void*)user_data());
  2085   // push root to visit stack when following references
  2086   if (control == JVMTI_ITERATION_CONTINUE &&
  2087       basic_context()->object_ref_callback() != NULL) {
  2088     visit_stack()->push(obj);
  2090   return control != JVMTI_ITERATION_ABORT;
  2093 // invoke basic style stack ref callback
  2094 inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
  2095                                                              jlong thread_tag,
  2096                                                              jint depth,
  2097                                                              jmethodID method,
  2098                                                              jint slot,
  2099                                                              oop obj) {
  2100   assert(ServiceUtil::visible_oop(obj), "checking");
  2102   // check if stack refs should be reported
  2103   jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
  2104   if (cb == NULL) {
  2105     return check_for_visit(obj);
  2108   CallbackWrapper wrapper(tag_map(), obj);
  2109   jvmtiIterationControl control = (*cb)(root_kind,
  2110                                         wrapper.klass_tag(),
  2111                                         wrapper.obj_size(),
  2112                                         wrapper.obj_tag_p(),
  2113                                         thread_tag,
  2114                                         depth,
  2115                                         method,
  2116                                         slot,
  2117                                         (void*)user_data());
  2118   // push root to visit stack when following references
  2119   if (control == JVMTI_ITERATION_CONTINUE &&
  2120       basic_context()->object_ref_callback() != NULL) {
  2121     visit_stack()->push(obj);
  2123   return control != JVMTI_ITERATION_ABORT;
  2126 // invoke basic style object reference callback
  2127 inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
  2128                                                                     oop referrer,
  2129                                                                     oop referree,
  2130                                                                     jint index) {
  2132   assert(ServiceUtil::visible_oop(referrer), "checking");
  2133   assert(ServiceUtil::visible_oop(referree), "checking");
  2135   BasicHeapWalkContext* context = basic_context();
  2137   // callback requires the referrer's tag. If it's the same referrer
  2138   // as the last call then we use the cached value.
  2139   jlong referrer_tag;
  2140   if (referrer == context->last_referrer()) {
  2141     referrer_tag = context->last_referrer_tag();
  2142   } else {
  2143     referrer_tag = tag_for(tag_map(), klassOop_if_java_lang_Class(referrer));
  2146   // do the callback
  2147   CallbackWrapper wrapper(tag_map(), referree);
  2148   jvmtiObjectReferenceCallback cb = context->object_ref_callback();
  2149   jvmtiIterationControl control = (*cb)(ref_kind,
  2150                                         wrapper.klass_tag(),
  2151                                         wrapper.obj_size(),
  2152                                         wrapper.obj_tag_p(),
  2153                                         referrer_tag,
  2154                                         index,
  2155                                         (void*)user_data());
  2157   // record referrer and referrer tag. For self-references record the
  2158   // tag value from the callback as this might differ from referrer_tag.
  2159   context->set_last_referrer(referrer);
  2160   if (referrer == referree) {
  2161     context->set_last_referrer_tag(*wrapper.obj_tag_p());
  2162   } else {
  2163     context->set_last_referrer_tag(referrer_tag);
  2166   if (control == JVMTI_ITERATION_CONTINUE) {
  2167     return check_for_visit(referree);
  2168   } else {
  2169     return control != JVMTI_ITERATION_ABORT;
  2173 // invoke advanced style heap root callback
  2174 inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
  2175                                                                 oop obj) {
  2176   assert(ServiceUtil::visible_oop(obj), "checking");
  2178   AdvancedHeapWalkContext* context = advanced_context();
  2180   // check that callback is provided
  2181   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  2182   if (cb == NULL) {
  2183     return check_for_visit(obj);
  2186   // apply class filter
  2187   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
  2188     return check_for_visit(obj);
  2191   // setup the callback wrapper
  2192   CallbackWrapper wrapper(tag_map(), obj);
  2194   // apply tag filter
  2195   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
  2196                                  wrapper.klass_tag(),
  2197                                  context->heap_filter())) {
  2198     return check_for_visit(obj);
  2201   // for arrays we need the length, otherwise -1
  2202   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
  2204   // invoke the callback
  2205   jint res  = (*cb)(ref_kind,
  2206                     NULL, // referrer info
  2207                     wrapper.klass_tag(),
  2208                     0,    // referrer_class_tag is 0 for heap root
  2209                     wrapper.obj_size(),
  2210                     wrapper.obj_tag_p(),
  2211                     NULL, // referrer_tag_p
  2212                     len,
  2213                     (void*)user_data());
  2214   if (res & JVMTI_VISIT_ABORT) {
  2215     return false;
  2217   if (res & JVMTI_VISIT_OBJECTS) {
  2218     check_for_visit(obj);
  2220   return true;
  2223 // report a reference from a thread stack to an object
  2224 inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
  2225                                                                 jlong thread_tag,
  2226                                                                 jlong tid,
  2227                                                                 int depth,
  2228                                                                 jmethodID method,
  2229                                                                 jlocation bci,
  2230                                                                 jint slot,
  2231                                                                 oop obj) {
  2232   assert(ServiceUtil::visible_oop(obj), "checking");
  2234   AdvancedHeapWalkContext* context = advanced_context();
  2236   // check that callback is provided
  2237   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  2238   if (cb == NULL) {
  2239     return check_for_visit(obj);
  2242   // apply class filter
  2243   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
  2244     return check_for_visit(obj);
  2247   // setup the callback wrapper
  2248   CallbackWrapper wrapper(tag_map(), obj);
  2250   // apply tag filter
  2251   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
  2252                                  wrapper.klass_tag(),
  2253                                  context->heap_filter())) {
  2254     return check_for_visit(obj);
  2257   // setup the referrer info
  2258   jvmtiHeapReferenceInfo reference_info;
  2259   reference_info.stack_local.thread_tag = thread_tag;
  2260   reference_info.stack_local.thread_id = tid;
  2261   reference_info.stack_local.depth = depth;
  2262   reference_info.stack_local.method = method;
  2263   reference_info.stack_local.location = bci;
  2264   reference_info.stack_local.slot = slot;
  2266   // for arrays we need the length, otherwise -1
  2267   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
  2269   // call into the agent
  2270   int res = (*cb)(ref_kind,
  2271                   &reference_info,
  2272                   wrapper.klass_tag(),
  2273                   0,    // referrer_class_tag is 0 for heap root (stack)
  2274                   wrapper.obj_size(),
  2275                   wrapper.obj_tag_p(),
  2276                   NULL, // referrer_tag is 0 for root
  2277                   len,
  2278                   (void*)user_data());
  2280   if (res & JVMTI_VISIT_ABORT) {
  2281     return false;
  2283   if (res & JVMTI_VISIT_OBJECTS) {
  2284     check_for_visit(obj);
  2286   return true;
  2289 // This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
  2290 // only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
  2291 #define REF_INFO_MASK  ((1 << JVMTI_HEAP_REFERENCE_FIELD)         \
  2292                       | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)  \
  2293                       | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
  2294                       | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
  2295                       | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)   \
  2296                       | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
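// For example, JVMTI_HEAP_REFERENCE_CLASS is not in the mask, so the callback
// below receives NULL instead of &reference_info for class references.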
  2298 // invoke the object reference callback to report a reference
  2299 inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
  2300                                                                        oop referrer,
  2301                                                                        oop obj,
  2302                                                                        jint index)
  2304   // the field index is the only valid field in reference_info
  2305   static jvmtiHeapReferenceInfo reference_info = { 0 };
  2307   assert(ServiceUtil::visible_oop(referrer), "checking");
  2308   assert(ServiceUtil::visible_oop(obj), "checking");
  2310   AdvancedHeapWalkContext* context = advanced_context();
  2312   // check that callback is provided
  2313   jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  2314   if (cb == NULL) {
  2315     return check_for_visit(obj);
  2318   // apply class filter
  2319   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
  2320     return check_for_visit(obj);
  2323   // setup the callback wrapper
  2324   TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);
  2326   // apply tag filter
  2327   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
  2328                                  wrapper.klass_tag(),
  2329                                  context->heap_filter())) {
  2330     return check_for_visit(obj);
  2333   // the field index is the only valid field in reference_info
  2334   reference_info.field.index = index;
  2336   // for arrays we need the length, otherwise -1
  2337   jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
  2339   // invoke the callback
  2340   int res = (*cb)(ref_kind,
  2341                   (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
  2342                   wrapper.klass_tag(),
  2343                   wrapper.referrer_klass_tag(),
  2344                   wrapper.obj_size(),
  2345                   wrapper.obj_tag_p(),
  2346                   wrapper.referrer_tag_p(),
  2347                   len,
  2348                   (void*)user_data());
  2350   if (res & JVMTI_VISIT_ABORT) {
  2351     return false;
  2353   if (res & JVMTI_VISIT_OBJECTS) {
  2354     check_for_visit(obj);
  2356   return true;
  2359 // report a "simple root"
  2360 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
  2361   assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
  2362          kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
  2363   assert(ServiceUtil::visible_oop(obj), "checking");
  2365   if (is_basic_heap_walk()) {
  2366     // map to old style root kind
  2367     jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
  2368     return invoke_basic_heap_root_callback(root_kind, obj);
  2369   } else {
  2370     assert(is_advanced_heap_walk(), "wrong heap walk type");
  2371     return invoke_advanced_heap_root_callback(kind, obj);
  2376 // invoke the array primitive value callback
  2377 inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
  2378   assert(obj->is_typeArray(), "not a primitive array");
  2380   AdvancedHeapWalkContext* context = advanced_context();
  2381   assert(context->array_primitive_value_callback() != NULL, "no callback");
  2383   // apply class filter
  2384   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
  2385     return true;
  2388   CallbackWrapper wrapper(tag_map(), obj);
  2390   // apply tag filter
  2391   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
  2392                                  wrapper.klass_tag(),
  2393                                  context->heap_filter())) {
  2394     return true;
  2397   // invoke the callback
  2398   int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
  2399                                                   &wrapper,
  2400                                                   obj,
  2401                                                   (void*)user_data());
  2402   return (!(res & JVMTI_VISIT_ABORT));
  2405 // invoke the string value callback
  2406 inline bool CallbackInvoker::report_string_value(oop str) {
  2407   assert(str->klass() == SystemDictionary::String_klass(), "not a string");
  2409   AdvancedHeapWalkContext* context = advanced_context();
  2410   assert(context->string_primitive_value_callback() != NULL, "no callback");
  2412   // apply class filter
  2413   if (is_filtered_by_klass_filter(str, context->klass_filter())) {
  2414     return true;
  2417   CallbackWrapper wrapper(tag_map(), str);
  2419   // apply tag filter
  2420   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
  2421                                  wrapper.klass_tag(),
  2422                                  context->heap_filter())) {
  2423     return true;
  2426   // invoke the callback
  2427   int res = invoke_string_value_callback(context->string_primitive_value_callback(),
  2428                                          &wrapper,
  2429                                          str,
  2430                                          (void*)user_data());
  2431   return (!(res & JVMTI_VISIT_ABORT));
  2434 // invoke the primitive field callback
  2435 inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
  2436                                                     oop obj,
  2437                                                     jint index,
  2438                                                     address addr,
  2439                                                     char type)
  2441   // for primitive fields only the index will be set
  2442   static jvmtiHeapReferenceInfo reference_info = { 0 };
  2444   AdvancedHeapWalkContext* context = advanced_context();
  2445   assert(context->primitive_field_callback() != NULL, "no callback");
  2447   // apply class filter
  2448   if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
  2449     return true;
  2452   CallbackWrapper wrapper(tag_map(), obj);
  2454   // apply tag filter
  2455   if (is_filtered_by_heap_filter(wrapper.obj_tag(),
  2456                                  wrapper.klass_tag(),
  2457                                  context->heap_filter())) {
  2458     return true;
  2461   // the field index in the referrer
  2462   reference_info.field.index = index;
  2464   // map the type
  2465   jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
  2467   // setup the jvalue
  2468   jvalue value;
  2469   copy_to_jvalue(&value, addr, value_type);
  2471   jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
  2472   int res = (*cb)(ref_kind,
  2473                   &reference_info,
  2474                   wrapper.klass_tag(),
  2475                   wrapper.obj_tag_p(),
  2476                   value,
  2477                   value_type,
  2478                   (void*)user_data());
  2479   return (!(res & JVMTI_VISIT_ABORT));
  2483 // instance field
  2484 inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
  2485                                                              jint index,
  2486                                                              address value,
  2487                                                              char type) {
  2488   return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
  2489                                 obj,
  2490                                 index,
  2491                                 value,
  2492                                 type);
  2495 // static field
  2496 inline bool CallbackInvoker::report_primitive_static_field(oop obj,
  2497                                                            jint index,
  2498                                                            address value,
  2499                                                            char type) {
  2500   return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
  2501                                 obj,
  2502                                 index,
  2503                                 value,
  2504                                 type);
  2507 // report a JNI local (root object) to the profiler
  2508 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
  2509   if (is_basic_heap_walk()) {
  2510     return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
  2511                                            thread_tag,
  2512                                            depth,
  2513                                            m,
  2514                                            -1,
  2515                                            obj);
  2516   } else {
  2517     return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
  2518                                               thread_tag, tid,
  2519                                               depth,
  2520                                               m,
  2521                                               (jlocation)-1,
  2522                                               -1,
  2523                                               obj);
  2528 // report a local (stack reference, root object)
  2529 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
  2530                                                    jlong tid,
  2531                                                    jint depth,
  2532                                                    jmethodID method,
  2533                                                    jlocation bci,
  2534                                                    jint slot,
  2535                                                    oop obj) {
  2536   if (is_basic_heap_walk()) {
  2537     return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
  2538                                            thread_tag,
  2539                                            depth,
  2540                                            method,
  2541                                            slot,
  2542                                            obj);
  2543   } else {
  2544     return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
  2545                                               thread_tag,
  2546                                               tid,
  2547                                               depth,
  2548                                               method,
  2549                                               bci,
  2550                                               slot,
  2551                                               obj);
  2555 // report an object referencing a class.
  2556 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
  2557   if (is_basic_heap_walk()) {
  2558     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
  2559   } else {
  2560     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
  2564 // report a class referencing its class loader.
  2565 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
  2566   if (is_basic_heap_walk()) {
  2567     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
  2568   } else {
  2569     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
  2573 // report a class referencing its signers.
  2574 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
  2575   if (is_basic_heap_walk()) {
  2576     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
  2577   } else {
  2578     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
  2582 // report a class referencing its protection domain.
  2583 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
  2584   if (is_basic_heap_walk()) {
  2585     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
  2586   } else {
  2587     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
  2591 // report a class referencing its superclass.
  2592 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
  2593   if (is_basic_heap_walk()) {
  2594     // Send this to be consistent with past implementation
  2595     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
  2596   } else {
  2597     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
  2601 // report a class referencing one of its interfaces.
  2602 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
  2603   if (is_basic_heap_walk()) {
  2604     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
  2605   } else {
  2606     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
  2610 // report a class referencing one of its static fields.
  2611 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
  2612   if (is_basic_heap_walk()) {
  2613     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
  2614   } else {
  2615     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
  2619 // report an array referencing an element object
  2620 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
  2621   if (is_basic_heap_walk()) {
  2622     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
  2623   } else {
  2624     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
  2628 // report an object referencing an instance field object
  2629 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
  2630   if (is_basic_heap_walk()) {
  2631     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
  2632   } else {
  2633     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
  2637 // report a class referencing one of its constant pool entries
  2638 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
  2639   if (is_basic_heap_walk()) {
  2640     return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
  2641   } else {
  2642     return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
  2646 // A supporting closure used to process simple roots
  2647 class SimpleRootsClosure : public OopClosure {
  2648  private:
  2649   jvmtiHeapReferenceKind _kind;
  2650   bool _continue;
  2652   jvmtiHeapReferenceKind root_kind()    { return _kind; }
  2654  public:
  2655   void set_kind(jvmtiHeapReferenceKind kind) {
  2656     _kind = kind;
  2657     _continue = true;
  2660   inline bool stopped() {
  2661     return !_continue;
  2664   void do_oop(oop* obj_p) {
  2665     // iteration has terminated
  2666     if (stopped()) {
  2667       return;
  2670     // ignore null or deleted handles
  2671     oop o = *obj_p;
  2672     if (o == NULL || o == JNIHandles::deleted_handle()) {
  2673       return;
  2676     jvmtiHeapReferenceKind kind = root_kind();
  2678     // many roots are Klasses so we use the java mirror
  2679     if (o->is_klass()) {
  2680       klassOop k = (klassOop)o;
  2681       o = Klass::cast(k)->java_mirror();
  2682     } else {
  2684       // SystemDictionary::always_strong_oops_do reports the application
  2685       // class loader as a root. We want this root to be reported as
  2686       // a root kind of "OTHER" rather than "SYSTEM_CLASS".
  2687       if (o->is_instance() && root_kind() == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
  2688         kind = JVMTI_HEAP_REFERENCE_OTHER;
  2692     // some objects are ignored - in the case of simple
  2693     // roots it's mostly symbolOops that we are skipping
  2694     // here.
  2695     if (!ServiceUtil::visible_oop(o)) {
  2696       return;
  2699     // invoke the callback
  2700     _continue = CallbackInvoker::report_simple_root(kind, o);
  2703   virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
  2704 };
  2706 // A supporting closure used to process JNI locals
  2707 class JNILocalRootsClosure : public OopClosure {
  2708  private:
  2709   jlong _thread_tag;
  2710   jlong _tid;
  2711   jint _depth;
  2712   jmethodID _method;
  2713   bool _continue;
  2714  public:
  2715   void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
  2716     _thread_tag = thread_tag;
  2717     _tid = tid;
  2718     _depth = depth;
  2719     _method = method;
  2720     _continue = true;
  2723   inline bool stopped() {
  2724     return !_continue;
  2727   void do_oop(oop* obj_p) {
  2728     // iteration has terminated
  2729     if (stopped()) {
  2730       return;
  2733     // ignore null or deleted handles
  2734     oop o = *obj_p;
  2735     if (o == NULL || o == JNIHandles::deleted_handle()) {
  2736       return;
  2739     if (!ServiceUtil::visible_oop(o)) {
  2740       return;
  2743     // invoke the callback
  2744     _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
  2746   virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
  2747 };
  2750 // A VM operation to iterate over objects that are reachable from
  2751 // a set of roots or an initial object.
  2752 //
  2753 // For VM_HeapWalkOperation the set of roots used is :-
  2754 //
  2755 // - All JNI global references
  2756 // - All inflated monitors
  2757 // - All classes loaded by the boot class loader (or all classes
  2758 //     in the event that class unloading is disabled)
  2759 // - All java threads
  2760 // - For each java thread then all locals and JNI local references
  2761 //      on the thread's execution stack
  2762 // - All visible/explainable objects from Universe::oops_do
  2763 //
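// A rough sketch of how the operation is driven (simplified; the actual
// doit() appears later in this file):
//
//   VM_HeapWalkOperation op(tag_map, initial_object, context, user_data);
//   VMThread::execute(&op);      // runs doit() at a safepoint:
//     // an ObjectMarkerController sets up and restores mark bits around the
//     // walk, the roots (or the initial object) seed the visit stack, and
//     // each popped object is passed to visit(), which reports its references.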
  2764 class VM_HeapWalkOperation: public VM_Operation {
  2765  private:
  2766   enum {
  2767     initial_visit_stack_size = 4000
  2768   };
  2770   bool _is_advanced_heap_walk;                      // indicates FollowReferences
  2771   JvmtiTagMap* _tag_map;
  2772   Handle _initial_object;
  2773   GrowableArray<oop>* _visit_stack;                 // the visit stack
  2775   bool _collecting_heap_roots;                      // are we collecting roots
  2776   bool _following_object_refs;                      // are we following object references
  2778   bool _reporting_primitive_fields;                 // optional reporting
  2779   bool _reporting_primitive_array_values;
  2780   bool _reporting_string_values;
  2782   GrowableArray<oop>* create_visit_stack() {
  2783     return new (ResourceObj::C_HEAP) GrowableArray<oop>(initial_visit_stack_size, true);
  2786   // accessors
  2787   bool is_advanced_heap_walk() const               { return _is_advanced_heap_walk; }
  2788   JvmtiTagMap* tag_map() const                     { return _tag_map; }
  2789   Handle initial_object() const                    { return _initial_object; }
  2791   bool is_following_references() const             { return _following_object_refs; }
  2793   bool is_reporting_primitive_fields()  const      { return _reporting_primitive_fields; }
  2794   bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
  2795   bool is_reporting_string_values() const          { return _reporting_string_values; }
  2797   GrowableArray<oop>* visit_stack() const          { return _visit_stack; }
  2799   // iterate over the various object types
  2800   inline bool iterate_over_array(oop o);
  2801   inline bool iterate_over_type_array(oop o);
  2802   inline bool iterate_over_class(klassOop o);
  2803   inline bool iterate_over_object(oop o);
  2805   // root collection
  2806   inline bool collect_simple_roots();
  2807   inline bool collect_stack_roots();
  2808   inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);
  2810   // visit an object
  2811   inline bool visit(oop o);
  2813  public:
  2814   VM_HeapWalkOperation(JvmtiTagMap* tag_map,
  2815                        Handle initial_object,
  2816                        BasicHeapWalkContext callbacks,
  2817                        const void* user_data);
  2819   VM_HeapWalkOperation(JvmtiTagMap* tag_map,
  2820                        Handle initial_object,
  2821                        AdvancedHeapWalkContext callbacks,
  2822                        const void* user_data);
  2824   ~VM_HeapWalkOperation();
  2826   VMOp_Type type() const { return VMOp_HeapWalkOperation; }
  2827   void doit();
  2828 };
  2831 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
  2832                                            Handle initial_object,
  2833                                            BasicHeapWalkContext callbacks,
  2834                                            const void* user_data) {
  2835   _is_advanced_heap_walk = false;
  2836   _tag_map = tag_map;
  2837   _initial_object = initial_object;
  2838   _following_object_refs = (callbacks.object_ref_callback() != NULL);
  2839   _reporting_primitive_fields = false;
  2840   _reporting_primitive_array_values = false;
  2841   _reporting_string_values = false;
  2842   _visit_stack = create_visit_stack();
  2845   CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
  2848 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
  2849                                            Handle initial_object,
  2850                                            AdvancedHeapWalkContext callbacks,
  2851                                            const void* user_data) {
  2852   _is_advanced_heap_walk = true;
  2853   _tag_map = tag_map;
  2854   _initial_object = initial_object;
  2855   _following_object_refs = true;
  2856   _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);
  2857   _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);
  2858   _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);
  2859   _visit_stack = create_visit_stack();
  2861   CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
  2864 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
  2865   if (_following_object_refs) {
  2866     assert(_visit_stack != NULL, "checking");
  2867     delete _visit_stack;
  2868     _visit_stack = NULL;
  2872 // an array references its class and has a reference to
  2873 // each element in the array
  2874 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
  2875   objArrayOop array = objArrayOop(o);
  2876   if (array->klass() == Universe::systemObjArrayKlassObj()) {
  2877     // filtered out
  2878     return true;
  2881   // array reference to its class
  2882   oop mirror = objArrayKlass::cast(array->klass())->java_mirror();
  2883   if (!CallbackInvoker::report_class_reference(o, mirror)) {
  2884     return false;
  2887   // iterate over the array and report each reference to a
  2888   // non-null element
  2889   for (int index=0; index<array->length(); index++) {
  2890     oop elem = array->obj_at(index);
  2891     if (elem == NULL) {
  2892       continue;
  2895     // report the array reference o[index] = elem
  2896     if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
  2897       return false;
  2900   return true;
  2903 // a type array references its class
  2904 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
  2905   klassOop k = o->klass();
  2906   oop mirror = Klass::cast(k)->java_mirror();
  2907   if (!CallbackInvoker::report_class_reference(o, mirror)) {
  2908     return false;
  2911   // report the array contents if required
  2912   if (is_reporting_primitive_array_values()) {
  2913     if (!CallbackInvoker::report_primitive_array_values(o)) {
  2914       return false;
  2917   return true;
  2920 // verify that a static oop field is in range
  2921 static inline bool verify_static_oop(instanceKlass* ik,
  2922                                      klassOop k, int offset) {
  2923   address obj_p = (address)k + offset;
  2924   address start = (address)ik->start_of_static_fields();
  2925   address end = start + (ik->static_oop_field_size() * heapOopSize);
  2926   assert(end >= start, "sanity check");
  2928   if (obj_p >= start && obj_p < end) {
  2929     return true;
  2930   } else {
  2931     return false;
  2935 // a class references its super class, interfaces, class loader, ...
  2936 // and finally its static fields
  2937 inline bool VM_HeapWalkOperation::iterate_over_class(klassOop k) {
  2938   int i;
  2939   Klass* klass = klassOop(k)->klass_part();
  2941   if (klass->oop_is_instance()) {
  2942     instanceKlass* ik = instanceKlass::cast(k);
  2944     // ignore the class if it hasn't been linked yet
  2945     if (!ik->is_linked()) {
  2946       return true;
  2949     // get the java mirror
  2950     oop mirror = klass->java_mirror();
  2952     // super (only if something more interesting than java.lang.Object)
  2953     klassOop java_super = ik->java_super();
  2954     if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
  2955       oop super = Klass::cast(java_super)->java_mirror();
  2956       if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
  2957         return false;
  2961     // class loader
  2962     oop cl = ik->class_loader();
  2963     if (cl != NULL) {
  2964       if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
  2965         return false;
  2969     // protection domain
  2970     oop pd = ik->protection_domain();
  2971     if (pd != NULL) {
  2972       if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
  2973         return false;
  2977     // signers
  2978     oop signers = ik->signers();
  2979     if (signers != NULL) {
  2980       if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
  2981         return false;
  2985     // references from the constant pool
  2987       const constantPoolOop pool = ik->constants();
  2988       for (int i = 1; i < pool->length(); i++) {
  2989         constantTag tag = pool->tag_at(i).value();
  2990         if (tag.is_string() || tag.is_klass()) {
  2991           oop entry;
  2992           if (tag.is_string()) {
  2993             entry = pool->resolved_string_at(i);
  2994             assert(java_lang_String::is_instance(entry), "must be string");
  2995           } else {
  2996             entry = Klass::cast(pool->resolved_klass_at(i))->java_mirror();
  2998           if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
  2999             return false;
  3005     // interfaces
  3006     // (These will already have been reported as references from the constant pool
  3007     //  but are specified by IterateOverReachableObjects and must be reported).
  3008     objArrayOop interfaces = ik->local_interfaces();
  3009     for (i = 0; i < interfaces->length(); i++) {
  3010       oop interf = Klass::cast((klassOop)interfaces->obj_at(i))->java_mirror();
  3011       if (interf == NULL) {
  3012         continue;
  3014       if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
  3015         return false;
  3019     // iterate over the static fields
  3021     ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(k);
  3022     for (i=0; i<field_map->field_count(); i++) {
  3023       ClassFieldDescriptor* field = field_map->field_at(i);
  3024       char type = field->field_type();
  3025       if (!is_primitive_field_type(type)) {
  3026         oop fld_o = k->obj_field(field->field_offset());
  3027         assert(verify_static_oop(ik, k, field->field_offset()), "sanity check");
  3028         if (fld_o != NULL) {
  3029           int slot = field->field_index();
  3030           if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
  3031             delete field_map;
  3032             return false;
  3035       } else {
  3036          if (is_reporting_primitive_fields()) {
  3037            address addr = (address)k + field->field_offset();
  3038            int slot = field->field_index();
  3039            if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
  3040              delete field_map;
  3041              return false;
  3046     delete field_map;
  3048     return true;
  3051   return true;
  3054 // an object references a class and its instance fields
  3055 // (static fields are ignored here as we report these as
  3056 // references from the class).
  3057 inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
  3058   // reference to the class
  3059   if (!CallbackInvoker::report_class_reference(o, Klass::cast(o->klass())->java_mirror())) {
  3060     return false;
  3063   // iterate over instance fields
  3064   ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
  3065   for (int i=0; i<field_map->field_count(); i++) {
  3066     ClassFieldDescriptor* field = field_map->field_at(i);
  3067     char type = field->field_type();
  3068     if (!is_primitive_field_type(type)) {
  3069       oop fld_o = o->obj_field(field->field_offset());
  3070       if (fld_o != NULL) {
  3071         // reflection code may have a reference to a klassOop.
  3072         // - see sun.reflect.UnsafeStaticFieldAccessorImpl and sun.misc.Unsafe
  3073         if (fld_o->is_klass()) {
  3074           klassOop k = (klassOop)fld_o;
  3075           fld_o = Klass::cast(k)->java_mirror();
  3077         int slot = field->field_index();
  3078         if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
  3079           return false;
  3082     } else {
  3083       if (is_reporting_primitive_fields()) {
  3084         // primitive instance field
  3085         address addr = (address)o + field->field_offset();
  3086         int slot = field->field_index();
  3087         if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
  3088           return false;
  3094   // if the object is a java.lang.String
  3095   if (is_reporting_string_values() &&
  3096       o->klass() == SystemDictionary::String_klass()) {
  3097     if (!CallbackInvoker::report_string_value(o)) {
  3098       return false;
  3101   return true;
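
// Illustrative sketch, not part of this file: during a basic heap walk the
// class/field references reported above reach the agent through its
// jvmtiObjectReferenceCallback. The callback name and tagging policy below are
// hypothetical; the <jvmti.h> declarations are assumed to be visible.
static jvmtiIterationControl JNICALL
example_object_ref_cb(jvmtiObjectReferenceKind reference_kind,
                      jlong class_tag,
                      jlong size,
                      jlong* tag_ptr,
                      jlong referrer_tag,
                      jint referrer_index,        // field slot or array index
                      void* user_data) {
  if (reference_kind == JVMTI_REFERENCE_FIELD && *tag_ptr == 0) {
    *tag_ptr = 1;                                 // tag objects reached via instance fields
  }
  return JVMTI_ITERATION_CONTINUE;                // keep following references
}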
  3105 // collects all simple (non-stack) roots.
  3106 // if there's a heap root callback provided then the callback is
  3107 // invoked for each simple root.
  3108 // if an object reference callback is provided then all simple
  3109 // roots are pushed onto the marking stack so that they can be
  3110 // processed later
  3111 //
  3112 inline bool VM_HeapWalkOperation::collect_simple_roots() {
  3113   SimpleRootsClosure blk;
  3115   // JNI globals
  3116   blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
  3117   JNIHandles::oops_do(&blk);
  3118   if (blk.stopped()) {
  3119     return false;
  3122   // Preloaded classes and loader from the system dictionary
  3123   blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
  3124   SystemDictionary::always_strong_oops_do(&blk);
  3125   if (blk.stopped()) {
  3126     return false;
  3129   // Inflated monitors
  3130   blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
  3131   ObjectSynchronizer::oops_do(&blk);
  3132   if (blk.stopped()) {
  3133     return false;
  3136   // Threads
  3137   for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
  3138     oop threadObj = thread->threadObj();
  3139     if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
  3140       bool cont = CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD, threadObj);
  3141       if (!cont) {
  3142         return false;
  3147   // Other kinds of roots maintained by HotSpot
  3148   // Many of these won't be visible but others (such as instances of important
  3149   // exceptions) will be visible.
  3150   blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
  3151   Universe::oops_do(&blk);
  3153   // If there are any non-perm roots in the code cache, visit them.
  3154   blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
  3155   CodeBlobToOopClosure look_in_blobs(&blk, false);
  3156   CodeCache::scavenge_root_nmethods_do(&look_in_blobs);
  3158   return true;
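
// Illustrative sketch, not part of this file: for a basic heap walk each simple
// root collected above is surfaced to the agent via its jvmtiHeapRootCallback
// (e.g. as JVMTI_HEAP_ROOT_JNI_GLOBAL, _SYSTEM_CLASS, _MONITOR, _THREAD or
// _OTHER). The callback name and the skip-monitors policy are hypothetical.
static jvmtiIterationControl JNICALL
example_heap_root_cb(jvmtiHeapRootKind root_kind,
                     jlong class_tag,
                     jlong size,
                     jlong* tag_ptr,
                     void* user_data) {
  if (root_kind == JVMTI_HEAP_ROOT_MONITOR) {
    return JVMTI_ITERATION_IGNORE;                // don't follow references from this root
  }
  return JVMTI_ITERATION_CONTINUE;
}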
  3161 // Walk the stack of a given thread and find all references (locals
  3162 // and JNI calls) and report these as stack references
  3163 inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
  3164                                                       JNILocalRootsClosure* blk)
  3166   oop threadObj = java_thread->threadObj();
  3167   assert(threadObj != NULL, "sanity check");
  3169   // only need to get the thread's tag once per thread
  3170   jlong thread_tag = tag_for(_tag_map, threadObj);
  3172   // also need the thread id
  3173   jlong tid = java_lang_Thread::thread_id(threadObj);
  3176   if (java_thread->has_last_Java_frame()) {
  3178     // vframes are resource allocated
  3179     Thread* current_thread = Thread::current();
  3180     ResourceMark rm(current_thread);
  3181     HandleMark hm(current_thread);
  3183     RegisterMap reg_map(java_thread);
  3184     frame f = java_thread->last_frame();
  3185     vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);
  3187     bool is_top_frame = true;
  3188     int depth = 0;
  3189     frame* last_entry_frame = NULL;
  3191     while (vf != NULL) {
  3192       if (vf->is_java_frame()) {
  3194         // java frame (interpreted, compiled, ...)
  3195         javaVFrame *jvf = javaVFrame::cast(vf);
  3197         // the jmethodID
  3198         jmethodID method = jvf->method()->jmethod_id();
  3200         if (!(jvf->method()->is_native())) {
  3201           jlocation bci = (jlocation)jvf->bci();
  3202           StackValueCollection* locals = jvf->locals();
  3203           for (int slot=0; slot<locals->size(); slot++) {
  3204             if (locals->at(slot)->type() == T_OBJECT) {
  3205               oop o = locals->obj_at(slot)();
  3206               if (o == NULL) {
  3207                 continue;
  3210               // stack reference
  3211               if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
  3212                                                    bci, slot, o)) {
  3213                 return false;
  3217         } else {
  3218           blk->set_context(thread_tag, tid, depth, method);
  3219           if (is_top_frame) {
  3220             // JNI locals for the top frame.
  3221             java_thread->active_handles()->oops_do(blk);
  3222           } else {
  3223             if (last_entry_frame != NULL) {
  3224               // JNI locals for the entry frame
  3225               assert(last_entry_frame->is_entry_frame(), "checking");
  3226               last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
  3230         last_entry_frame = NULL;
  3231         depth++;
  3232       } else {
  3233         // externalVFrame - for an entry frame we report the JNI locals
  3234         // when we find the corresponding javaVFrame
  3235         frame* fr = vf->frame_pointer();
  3236         assert(fr != NULL, "sanity check");
  3237         if (fr->is_entry_frame()) {
  3238           last_entry_frame = fr;
  3242       vf = vf->sender();
  3243       is_top_frame = false;
  3245   } else {
  3246     // no last java frame but there may be JNI locals
  3247     blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
  3248     java_thread->active_handles()->oops_do(blk);
  3250   return true;
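
// Illustrative sketch, not part of this file: each local or JNI local found by
// the stack walk above is reported to a basic heap walk agent through its
// jvmtiStackReferenceCallback, carrying the thread tag, frame depth, jmethodID
// and slot gathered here. The callback name is hypothetical.
static jvmtiIterationControl JNICALL
example_stack_ref_cb(jvmtiHeapRootKind root_kind,  // JVMTI_HEAP_ROOT_STACK_LOCAL or _JNI_LOCAL
                     jlong class_tag,
                     jlong size,
                     jlong* tag_ptr,
                     jlong thread_tag,
                     jint depth,
                     jmethodID method,
                     jint slot,
                     void* user_data) {
  return JVMTI_ITERATION_CONTINUE;                 // visit every stack root
}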
  3254 // collects all stack roots - for each thread it walks the execution
  3255 // stack to find all references and local JNI refs.
  3256 inline bool VM_HeapWalkOperation::collect_stack_roots() {
  3257   JNILocalRootsClosure blk;
  3258   for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
  3259     oop threadObj = thread->threadObj();
  3260     if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
  3261       if (!collect_stack_roots(thread, &blk)) {
  3262         return false;
  3266   return true;
  3269 // visit an object
  3270 // first mark the object as visited
  3271 // second get all the outbound references from this object (in other words, all
  3272 // the objects referenced by this object).
  3273 //
  3274 bool VM_HeapWalkOperation::visit(oop o) {
  3275   // mark object as visited
  3276   assert(!ObjectMarker::visited(o), "can't visit same object more than once");
  3277   ObjectMarker::mark(o);
  3279   // instance
  3280   if (o->is_instance()) {
  3281     if (o->klass() == SystemDictionary::Class_klass()) {
  3282       o = klassOop_if_java_lang_Class(o);
  3283       if (o->is_klass()) {
  3284         // a java.lang.Class
  3285         return iterate_over_class(klassOop(o));
  3287     } else {
  3288       return iterate_over_object(o);
  3292   // object array
  3293   if (o->is_objArray()) {
  3294     return iterate_over_array(o);
  3297   // type array
  3298   if (o->is_typeArray()) {
  3299     return iterate_over_type_array(o);
  3302   return true;
  3305 void VM_HeapWalkOperation::doit() {
  3306   ResourceMark rm;
  3307   ObjectMarkerController marker;
  3308   ClassFieldMapCacheMark cm;
  3310   assert(visit_stack()->is_empty(), "visit stack must be empty");
  3312   // the heap walk starts with an initial object or the heap roots
  3313   if (initial_object().is_null()) {
  3314     if (!collect_simple_roots()) return;
  3315     if (!collect_stack_roots()) return;
  3316   } else {
  3317     visit_stack()->push(initial_object()());
  3320   // object references required
  3321   if (is_following_references()) {
  3323     // visit each object until all reachable objects have been
  3324     // visited or the callback asked to terminate the iteration.
  3325     while (!visit_stack()->is_empty()) {
  3326       oop o = visit_stack()->pop();
  3327       if (!ObjectMarker::visited(o)) {
  3328         if (!visit(o)) {
  3329           break;
  3336 // iterate over all objects that are reachable from a set of roots
  3337 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
  3338                                                  jvmtiStackReferenceCallback stack_ref_callback,
  3339                                                  jvmtiObjectReferenceCallback object_ref_callback,
  3340                                                  const void* user_data) {
  3341   MutexLocker ml(Heap_lock);
  3342   BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
  3343   VM_HeapWalkOperation op(this, Handle(), context, user_data);
  3344   VMThread::execute(&op);
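
// Illustrative sketch, not part of this file: an agent reaches this path with
// the JVMTI 1.0 heap-walking API, typically from an agent thread. The 'jvmti'
// environment pointer and example_object_ref_cb (sketched earlier) are
// hypothetical names; any of the three callbacks may be NULL.
static void example_basic_heap_walk(jvmtiEnv* jvmti) {
  jvmtiError err = jvmti->IterateOverReachableObjects(
      NULL,                       // heap_root_callback
      NULL,                       // stack_ref_callback
      example_object_ref_cb,      // object_ref_callback - drives the visit stack
      NULL);                      // user_data
  if (err != JVMTI_ERROR_NONE) {
    // a real agent would report the error
  }
}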
  3347 // iterate over all objects that are reachable from a given object
  3348 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
  3349                                                              jvmtiObjectReferenceCallback object_ref_callback,
  3350                                                              const void* user_data) {
  3351   oop obj = JNIHandles::resolve(object);
  3352   Handle initial_object(Thread::current(), obj);
  3354   MutexLocker ml(Heap_lock);
  3355   BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
  3356   VM_HeapWalkOperation op(this, initial_object, context, user_data);
  3357   VMThread::execute(&op);
  3360 // follow references from an initial object or the GC roots
  3361 void JvmtiTagMap::follow_references(jint heap_filter,
  3362                                     KlassHandle klass,
  3363                                     jobject object,
  3364                                     const jvmtiHeapCallbacks* callbacks,
  3365                                     const void* user_data)
  3367   oop obj = JNIHandles::resolve(object);
  3368   Handle initial_object(Thread::current(), obj);
  3370   MutexLocker ml(Heap_lock);
  3371   AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
  3372   VM_HeapWalkOperation op(this, initial_object, context, user_data);
  3373   VMThread::execute(&op);
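
// Illustrative sketch, not part of this file: the advanced walk above backs the
// JVMTI 1.1 FollowReferences call. A minimal agent-side use, with hypothetical
// names, assuming <jvmti.h>/<string.h> and a jvmtiEnv* obtained in Agent_OnLoad:
static jint JNICALL
example_heap_ref_cb(jvmtiHeapReferenceKind reference_kind,
                    const jvmtiHeapReferenceInfo* reference_info,
                    jlong class_tag,
                    jlong referrer_class_tag,
                    jlong size,
                    jlong* tag_ptr,
                    jlong* referrer_tag_ptr,
                    jint length,
                    void* user_data) {
  return JVMTI_VISIT_OBJECTS;                     // continue and visit the referees
}

static void example_follow_references(jvmtiEnv* jvmti) {
  jvmtiHeapCallbacks callbacks;
  memset(&callbacks, 0, sizeof(callbacks));
  callbacks.heap_reference_callback = example_heap_ref_cb;
  jvmtiError err = jvmti->FollowReferences(0,          // heap_filter: no filtering
                                           NULL,       // klass: all classes
                                           NULL,       // initial_object: start at the roots
                                           &callbacks,
                                           NULL);      // user_data
  if (err != JVMTI_ERROR_NONE) {
    // a real agent would report the error
  }
}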
  3377 // called post-GC
  3378 // - for each JVMTI environment with an object tag map, call its rehash
  3379 // function to re-sync with the new object locations.
  3380 void JvmtiTagMap::gc_epilogue(bool full) {
  3381   assert(SafepointSynchronize::is_at_safepoint(), "must be executed at a safepoint");
  3382   if (JvmtiEnv::environments_might_exist()) {
  3383     // re-obtain the memory region for the young generation (might
  3384     // have changed due to the adaptive resizing policy)
  3385     get_young_generation();
  3387     JvmtiEnvIterator it;
  3388     for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
  3389       JvmtiTagMap* tag_map = env->tag_map();
  3390       if (tag_map != NULL && !tag_map->is_empty()) {
  3391         TraceTime t(full ? "JVMTI Full Rehash " : "JVMTI Rehash ", TraceJVMTIObjectTagging);
  3392         if (full) {
  3393           tag_map->rehash(0, n_hashmaps);
  3394         } else {
  3395           tag_map->rehash(0, 0);        // tag map for young gen only
  3402 // CMS has completed reference processing so we may have JNI weak refs
  3403 // to objects in the CMS generation that have been GC'ed.
  3404 void JvmtiTagMap::cms_ref_processing_epilogue() {
  3405   assert(SafepointSynchronize::is_at_safepoint(), "must be executed at a safepoint");
  3406   assert(UseConcMarkSweepGC, "should only be used with CMS");
  3407   if (JvmtiEnv::environments_might_exist()) {
  3408     JvmtiEnvIterator it;
  3409     for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
  3410       JvmtiTagMap* tag_map = ((JvmtiEnvBase *)env)->tag_map();
  3411       if (tag_map != NULL && !tag_map->is_empty()) {
  3412         TraceTime t("JVMTI Rehash (CMS) ", TraceJVMTIObjectTagging);
  3413         tag_map->rehash(1, n_hashmaps);    // assume CMS not used in young gen
  3420 // For each entry in the hashmaps 'start' to 'end' :
  3421 //
  3422 // 1. resolve the JNI weak reference
  3423 //
  3424 // 2. If it resolves to NULL it means the object has been freed so the entry
  3425 //    is removed, the weak reference destroyed, and the object free event is
  3426 //    posted (if enabled).
  3427 //
  3428 // 3. If the weak reference resolves to an object then we re-hash the object
  3429 //    to see if it has moved or has been promoted (from the young to the old
  3430 //    generation for example).
  3431 //
  3432 void JvmtiTagMap::rehash(int start, int end) {
  3434   // does this environment have the OBJECT_FREE event enabled
  3435   bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);
  3437   // counters used for trace message
  3438   int freed = 0;
  3439   int moved = 0;
  3440   int promoted = 0;
  3442   // we assume there are two hashmaps - one for the young generation
  3443   // and the other for all other spaces.
  3444   assert(n_hashmaps == 2, "not implemented");
  3445   JvmtiTagHashmap* young_hashmap = _hashmap[0];
  3446   JvmtiTagHashmap* other_hashmap = _hashmap[1];
  3448   // reenable sizing (if disabled)
  3449   young_hashmap->set_resizing_enabled(true);
  3450   other_hashmap->set_resizing_enabled(true);
  3452   // when re-hashing the hashmap corresponding to the young generation we
  3453   // collect the entries corresponding to objects that have been promoted.
  3454   JvmtiTagHashmapEntry* promoted_entries = NULL;
  3456   if (end >= n_hashmaps) {
  3457     end = n_hashmaps - 1;
  3460   for (int i=start; i <= end; i++) {
  3461     JvmtiTagHashmap* hashmap = _hashmap[i];
  3463     // if the hashmap is empty then we can skip it
  3464     if (hashmap->_entry_count == 0) {
  3465       continue;
  3468     // now iterate through each entry in the table
  3470     JvmtiTagHashmapEntry** table = hashmap->table();
  3471     int size = hashmap->size();
  3473     for (int pos=0; pos<size; pos++) {
  3474       JvmtiTagHashmapEntry* entry = table[pos];
  3475       JvmtiTagHashmapEntry* prev = NULL;
  3477       while (entry != NULL) {
  3478         JvmtiTagHashmapEntry* next = entry->next();
  3480         jweak ref = entry->object();
  3481         oop oop = JNIHandles::resolve(ref);
  3483         // has object been GC'ed
  3484         if (oop == NULL) {
  3485           // grab the tag
  3486           jlong tag = entry->tag();
  3487           guarantee(tag != 0, "checking");
  3489           // remove GC'ed entry from hashmap and return the
  3490           // entry to the free list
  3491           hashmap->remove(prev, pos, entry);
  3492           destroy_entry(entry);
  3494           // destroy the weak ref
  3495           JNIHandles::destroy_weak_global(ref);
  3497           // post the event to the profiler
  3498           if (post_object_free) {
  3499             JvmtiExport::post_object_free(env(), tag);
  3502           freed++;
  3503           entry = next;
  3504           continue;
  3507         // if this is the young hashmap then the object is either promoted
  3508         // or moved.
  3509         // if this is the other hashmap then the object is moved.
  3511         bool same_gen;
  3512         if (i == 0) {
  3513           assert(hashmap == young_hashmap, "checking");
  3514           same_gen = is_in_young(oop);
  3515         } else {
  3516           same_gen = true;
  3520         if (same_gen) {
  3521           // if the object has moved then re-hash it and move its
  3522           // entry to its new location.
  3523           unsigned int new_pos = JvmtiTagHashmap::hash(oop, size);
  3524           if (new_pos != (unsigned int)pos) {
  3525             if (prev == NULL) {
  3526               table[pos] = next;
  3527             } else {
  3528               prev->set_next(next);
  3530             entry->set_next(table[new_pos]);
  3531             table[new_pos] = entry;
  3532             moved++;
  3533           } else {
  3534             // object didn't move
  3535             prev = entry;
  3537         } else {
  3538           // object has been promoted so remove the entry from the
  3539           // young hashmap
  3540           assert(hashmap == young_hashmap, "checking");
  3541           hashmap->remove(prev, pos, entry);
  3543           // move the entry to the promoted list
  3544           entry->set_next(promoted_entries);
  3545           promoted_entries = entry;
  3548         entry = next;
  3554   // add the entries corresponding to the promoted objects to the
  3555   // other hashmap.
  3556   JvmtiTagHashmapEntry* entry = promoted_entries;
  3557   while (entry != NULL) {
  3558     oop o = JNIHandles::resolve(entry->object());
  3559     assert(hashmap_for(o) == other_hashmap, "checking");
  3560     JvmtiTagHashmapEntry* next = entry->next();
  3561     other_hashmap->add(o, entry);
  3562     entry = next;
  3563     promoted++;
  3566   // stats
  3567   if (TraceJVMTIObjectTagging) {
  3568     int total_moves = promoted + moved;
  3570     int post_total = 0;
  3571     for (int i=0; i<n_hashmaps; i++) {
  3572       post_total += _hashmap[i]->_entry_count;
  3574     int pre_total = post_total + freed;
  3576     tty->print("(%d->%d, %d freed, %d promoted, %d total moves)",
  3577         pre_total, post_total, freed, promoted, total_moves);
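
// Illustrative sketch, not part of this file: the post_object_free path above
// only delivers events to agents that acquired the capability and enabled the
// event. The function names are hypothetical; <jvmti.h>/<string.h> are assumed.
static void JNICALL example_object_free(jvmtiEnv* jvmti_env, jlong tag) {
  // only the tag is delivered; the object itself has already been GC'ed
}

static jvmtiError example_enable_object_free(jvmtiEnv* jvmti) {
  jvmtiCapabilities caps;
  memset(&caps, 0, sizeof(caps));
  caps.can_generate_object_free_events = 1;
  jvmtiError err = jvmti->AddCapabilities(&caps);
  if (err != JVMTI_ERROR_NONE) return err;

  jvmtiEventCallbacks callbacks;
  memset(&callbacks, 0, sizeof(callbacks));
  callbacks.ObjectFree = example_object_free;
  err = jvmti->SetEventCallbacks(&callbacks, (jint)sizeof(callbacks));
  if (err != JVMTI_ERROR_NONE) return err;

  return jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_OBJECT_FREE, NULL);
}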
