src/share/vm/prims/jvmtiTagMap.cpp

author:      phh
date:        Fri, 07 Jan 2011 10:42:32 -0500
changeset:   2423:b1a2afa37ec4
parent:      2314:f95d63e2154a
child:       2445:7246a374a9f2
permissions: -rw-r--r--

7003271: Hotspot should track cumulative Java heap bytes allocated on a per-thread basis
Summary: Track allocated bytes in Thread's, update on TLAB retirement and direct allocation in Eden and tenured, add JNI methods for ThreadMXBean.
Reviewed-by: coleenp, kvn, dholmes, ysr

duke@435 1 /*
stefank@2314 2 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
stefank@2314 26 #include "classfile/symbolTable.hpp"
stefank@2314 27 #include "classfile/systemDictionary.hpp"
stefank@2314 28 #include "classfile/vmSymbols.hpp"
stefank@2314 29 #include "jvmtifiles/jvmtiEnv.hpp"
stefank@2314 30 #include "oops/objArrayKlass.hpp"
stefank@2314 31 #include "oops/oop.inline2.hpp"
stefank@2314 32 #include "prims/jvmtiEventController.hpp"
stefank@2314 33 #include "prims/jvmtiEventController.inline.hpp"
stefank@2314 34 #include "prims/jvmtiExport.hpp"
stefank@2314 35 #include "prims/jvmtiImpl.hpp"
stefank@2314 36 #include "prims/jvmtiTagMap.hpp"
stefank@2314 37 #include "runtime/biasedLocking.hpp"
stefank@2314 38 #include "runtime/javaCalls.hpp"
stefank@2314 39 #include "runtime/jniHandles.hpp"
stefank@2314 40 #include "runtime/mutex.hpp"
stefank@2314 41 #include "runtime/mutexLocker.hpp"
stefank@2314 42 #include "runtime/reflectionUtils.hpp"
stefank@2314 43 #include "runtime/vframe.hpp"
stefank@2314 44 #include "runtime/vmThread.hpp"
stefank@2314 45 #include "runtime/vm_operations.hpp"
stefank@2314 46 #include "services/serviceUtil.hpp"
stefank@2314 47 #ifndef SERIALGC
stefank@2314 48 #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp"
stefank@2314 49 #endif
duke@435 50
duke@435 51 // JvmtiTagHashmapEntry
duke@435 52 //
duke@435 53 // Each entry encapsulates a JNI weak reference to the tagged object
duke@435 54 // and the tag value. In addition an entry includes a next pointer which
duke@435 55 // is used to chain entries together.
duke@435 56
duke@435 57 class JvmtiTagHashmapEntry : public CHeapObj {
duke@435 58 private:
duke@435 59 friend class JvmtiTagMap;
duke@435 60
duke@435 61 jweak _object; // JNI weak ref to tagged object
duke@435 62 jlong _tag; // the tag
duke@435 63 JvmtiTagHashmapEntry* _next; // next on the list
duke@435 64
duke@435 65 inline void init(jweak object, jlong tag) {
duke@435 66 _object = object;
duke@435 67 _tag = tag;
duke@435 68 _next = NULL;
duke@435 69 }
duke@435 70
duke@435 71 // constructor
duke@435 72 JvmtiTagHashmapEntry(jweak object, jlong tag) { init(object, tag); }
duke@435 73
duke@435 74 public:
duke@435 75
duke@435 76 // accessor methods
duke@435 77 inline jweak object() const { return _object; }
duke@435 78 inline jlong tag() const { return _tag; }
duke@435 79
duke@435 80 inline void set_tag(jlong tag) {
duke@435 81 assert(tag != 0, "can't be zero");
duke@435 82 _tag = tag;
duke@435 83 }
duke@435 84
duke@435 85 inline JvmtiTagHashmapEntry* next() const { return _next; }
duke@435 86 inline void set_next(JvmtiTagHashmapEntry* next) { _next = next; }
duke@435 87 };
duke@435 88
duke@435 89
duke@435 90 // JvmtiTagHashmap
duke@435 91 //
duke@435 92 // A hashmap is essentially a table of pointers to entries. Entries
duke@435 93 // are hashed to a location, or position in the table, and then
duke@435 94 // chained from that location. The "key" for hashing is the address of
duke@435 95 // the object, or oop. The "value" is the JNI weak reference to the
duke@435 96 // object and the tag value. Keys are not stored with the entry.
duke@435 97 // Instead the weak reference is resolved to obtain the key.
duke@435 98 //
duke@435 99 // A hashmap maintains a count of the number of entries in the hashmap
duke@435 100 // and resizes if the number of entries exceeds a given threshold.
duke@435 101 // The threshold is specified as a percentage of the size - for
duke@435 102 // example a threshold of 0.75 will trigger the hashmap to resize
duke@435 103 // if the number of entries is >75% of table size.
duke@435 104 //
duke@435 105 // A hashmap provides functions for adding, removing, and finding
duke@435 106 // entries. It also provides a function to iterate over all entries
duke@435 107 // in the hashmap.
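//
// A minimal usage sketch (for orientation only, not taken from this file's
// callers; entries are actually created via JvmtiTagMap::create_entry since
// the JvmtiTagHashmapEntry constructor is private, and 'ref'/'o' stand for a
// JNI weak global and the oop it resolves to):
//
//   JvmtiTagHashmap* map = new JvmtiTagHashmap();        // default sizing
//   JvmtiTagHashmapEntry* e = create_entry(ref, tag);    // see JvmtiTagMap
//   map->add(o, e);                                // chain entry at hash(o)
//   JvmtiTagHashmapEntry* found = map->find(o);    // -> e, or NULL if absent
//   JvmtiTagHashmapEntry* gone  = map->remove(o);  // unlink; caller frees it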
duke@435 108
duke@435 109 class JvmtiTagHashmap : public CHeapObj {
duke@435 110 private:
duke@435 111 friend class JvmtiTagMap;
duke@435 112
duke@435 113 enum {
duke@435 114 small_trace_threshold = 10000, // threshold for tracing
duke@435 115 medium_trace_threshold = 100000,
duke@435 116 large_trace_threshold = 1000000,
duke@435 117 initial_trace_threshold = small_trace_threshold
duke@435 118 };
duke@435 119
duke@435 120 static int _sizes[]; // array of possible hashmap sizes
duke@435 121 int _size; // actual size of the table
duke@435 122 int _size_index; // index into size table
duke@435 123
duke@435 124 int _entry_count; // number of entries in the hashmap
duke@435 125
duke@435 126 float _load_factor; // load factor as a % of the size
duke@435 127 int _resize_threshold; // computed threshold to trigger resizing.
duke@435 128 bool _resizing_enabled; // indicates if hashmap can resize
duke@435 129
duke@435 130 int _trace_threshold; // threshold for trace messages
duke@435 131
duke@435 132 JvmtiTagHashmapEntry** _table; // the table of entries.
duke@435 133
duke@435 134 // private accessors
duke@435 135 int resize_threshold() const { return _resize_threshold; }
duke@435 136 int trace_threshold() const { return _trace_threshold; }
duke@435 137
duke@435 138 // initialize the hashmap
duke@435 139 void init(int size_index=0, float load_factor=4.0f) {
duke@435 140 int initial_size = _sizes[size_index];
duke@435 141 _size_index = size_index;
duke@435 142 _size = initial_size;
duke@435 143 _entry_count = 0;
duke@435 144 if (TraceJVMTIObjectTagging) {
duke@435 145 _trace_threshold = initial_trace_threshold;
duke@435 146 } else {
duke@435 147 _trace_threshold = -1;
duke@435 148 }
duke@435 149 _load_factor = load_factor;
duke@435 150 _resize_threshold = (int)(_load_factor * _size);
duke@435 151 _resizing_enabled = true;
duke@435 152 size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
duke@435 153 _table = (JvmtiTagHashmapEntry**)os::malloc(s);
duke@435 154 if (_table == NULL) {
duke@435 155 vm_exit_out_of_memory(s, "unable to allocate initial hashtable for jvmti object tags");
duke@435 156 }
duke@435 157 for (int i=0; i<initial_size; i++) {
duke@435 158 _table[i] = NULL;
duke@435 159 }
duke@435 160 }
duke@435 161
duke@435 162 // hash a given key (oop) with the specified size
duke@435 163 static unsigned int hash(oop key, int size) {
duke@435 164 // shift right to get better distribution (as these bits will be zero
duke@435 165 // with aligned addresses)
duke@435 166 unsigned int addr = (unsigned int)((intptr_t)key);
duke@435 167 #ifdef _LP64
duke@435 168 return (addr >> 3) % size;
duke@435 169 #else
duke@435 170 return (addr >> 2) % size;
duke@435 171 #endif
duke@435 172 }
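  // Worked example (hypothetical addresses, LP64 case): an oop at 0x1000
  // hashes to (0x1000 >> 3) % 4801 == 512 % 4801 == bucket 512 in the
  // smallest table, and an oop 8 bytes later at 0x1008 lands in bucket 513,
  // so the always-zero low alignment bits do not force collisions.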
duke@435 173
duke@435 174 // hash a given key (oop)
duke@435 175 unsigned int hash(oop key) {
duke@435 176 return hash(key, _size);
duke@435 177 }
duke@435 178
duke@435 179 // resize the hashmap - allocates a larger table and re-hashes
duke@435 180 // all entries into the new table.
duke@435 181 void resize() {
duke@435 182 int new_size_index = _size_index+1;
duke@435 183 int new_size = _sizes[new_size_index];
duke@435 184 if (new_size < 0) {
duke@435 185 // hashmap already at maximum capacity
duke@435 186 return;
duke@435 187 }
duke@435 188
duke@435 189 // allocate new table
duke@435 190 size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
duke@435 191 JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s);
duke@435 192 if (new_table == NULL) {
duke@435 193 warning("unable to allocate larger hashtable for jvmti object tags");
duke@435 194 set_resizing_enabled(false);
duke@435 195 return;
duke@435 196 }
duke@435 197
duke@435 198 // initialize new table
duke@435 199 int i;
duke@435 200 for (i=0; i<new_size; i++) {
duke@435 201 new_table[i] = NULL;
duke@435 202 }
duke@435 203
duke@435 204 // rehash all entries into the new table
duke@435 205 for (i=0; i<_size; i++) {
duke@435 206 JvmtiTagHashmapEntry* entry = _table[i];
duke@435 207 while (entry != NULL) {
duke@435 208 JvmtiTagHashmapEntry* next = entry->next();
duke@435 209 oop key = JNIHandles::resolve(entry->object());
duke@435 210 assert(key != NULL, "jni weak reference cleared!!");
duke@435 211 unsigned int h = hash(key, new_size);
duke@435 212 JvmtiTagHashmapEntry* anchor = new_table[h];
duke@435 213 if (anchor == NULL) {
duke@435 214 new_table[h] = entry;
duke@435 215 entry->set_next(NULL);
duke@435 216 } else {
duke@435 217 entry->set_next(anchor);
duke@435 218 new_table[h] = entry;
duke@435 219 }
duke@435 220 entry = next;
duke@435 221 }
duke@435 222 }
duke@435 223
duke@435 224 // free old table and update settings.
duke@435 225 os::free((void*)_table);
duke@435 226 _table = new_table;
duke@435 227 _size_index = new_size_index;
duke@435 228 _size = new_size;
duke@435 229
duke@435 230 // compute new resize threshold
duke@435 231 _resize_threshold = (int)(_load_factor * _size);
duke@435 232 }
duke@435 233
duke@435 234
duke@435 235 // internal remove function - remove an entry at a given position in the
duke@435 236 // table.
duke@435 237 inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
duke@435 238 assert(pos >= 0 && pos < _size, "out of range");
duke@435 239 if (prev == NULL) {
duke@435 240 _table[pos] = entry->next();
duke@435 241 } else {
duke@435 242 prev->set_next(entry->next());
duke@435 243 }
duke@435 244 assert(_entry_count > 0, "checking");
duke@435 245 _entry_count--;
duke@435 246 }
duke@435 247
duke@435 248 // resizing switch
duke@435 249 bool is_resizing_enabled() const { return _resizing_enabled; }
duke@435 250 void set_resizing_enabled(bool enable) { _resizing_enabled = enable; }
duke@435 251
duke@435 252 // debugging
duke@435 253 void print_memory_usage();
duke@435 254 void compute_next_trace_threshold();
duke@435 255
duke@435 256 public:
duke@435 257
duke@435 258 // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
duke@435 259 // The preferred size is rounded up to the nearest size in the size table (or capped at the largest size).
duke@435 260 JvmtiTagHashmap(int size, float load_factor=0.0f) {
duke@435 261 int i=0;
duke@435 262 while (_sizes[i] < size) {
duke@435 263 if (_sizes[i] < 0) {
duke@435 264 assert(i > 0, "sanity check");
duke@435 265 i--;
duke@435 266 break;
duke@435 267 }
duke@435 268 i++;
duke@435 269 }
duke@435 270
duke@435 271 // if a load factor is specified then use it, otherwise use default
duke@435 272 if (load_factor > 0.01f) {
duke@435 273 init(i, load_factor);
duke@435 274 } else {
duke@435 275 init(i);
duke@435 276 }
duke@435 277 }
duke@435 278
duke@435 279 // create a JvmtiTagHashmap with default settings
duke@435 280 JvmtiTagHashmap() {
duke@435 281 init();
duke@435 282 }
duke@435 283
duke@435 284 // release the table when the JvmtiTagHashmap is destroyed
duke@435 285 ~JvmtiTagHashmap() {
duke@435 286 if (_table != NULL) {
duke@435 287 os::free((void*)_table);
duke@435 288 _table = NULL;
duke@435 289 }
duke@435 290 }
duke@435 291
duke@435 292 // accessors
duke@435 293 int size() const { return _size; }
duke@435 294 JvmtiTagHashmapEntry** table() const { return _table; }
duke@435 295 int entry_count() const { return _entry_count; }
duke@435 296
duke@435 297 // find an entry in the hashmap, returns NULL if not found.
duke@435 298 inline JvmtiTagHashmapEntry* find(oop key) {
duke@435 299 unsigned int h = hash(key);
duke@435 300 JvmtiTagHashmapEntry* entry = _table[h];
duke@435 301 while (entry != NULL) {
duke@435 302 oop orig_key = JNIHandles::resolve(entry->object());
duke@435 303 assert(orig_key != NULL, "jni weak reference cleared!!");
duke@435 304 if (key == orig_key) {
duke@435 305 break;
duke@435 306 }
duke@435 307 entry = entry->next();
duke@435 308 }
duke@435 309 return entry;
duke@435 310 }
duke@435 311
duke@435 312
duke@435 313 // add a new entry to hashmap
duke@435 314 inline void add(oop key, JvmtiTagHashmapEntry* entry) {
duke@435 315 assert(key != NULL, "checking");
duke@435 316 assert(find(key) == NULL, "duplicate detected");
duke@435 317 unsigned int h = hash(key);
duke@435 318 JvmtiTagHashmapEntry* anchor = _table[h];
duke@435 319 if (anchor == NULL) {
duke@435 320 _table[h] = entry;
duke@435 321 entry->set_next(NULL);
duke@435 322 } else {
duke@435 323 entry->set_next(anchor);
duke@435 324 _table[h] = entry;
duke@435 325 }
duke@435 326
duke@435 327 _entry_count++;
duke@435 328 if (trace_threshold() > 0 && entry_count() >= trace_threshold()) {
duke@435 329 assert(TraceJVMTIObjectTagging, "should only get here when tracing");
duke@435 330 print_memory_usage();
duke@435 331 compute_next_trace_threshold();
duke@435 332 }
duke@435 333
duke@435 334 // if the number of entries exceeds the threshold then resize
duke@435 335 if (entry_count() > resize_threshold() && is_resizing_enabled()) {
duke@435 336 resize();
duke@435 337 }
duke@435 338 }
duke@435 339
duke@435 340 // remove an entry with the given key.
duke@435 341 inline JvmtiTagHashmapEntry* remove(oop key) {
duke@435 342 unsigned int h = hash(key);
duke@435 343 JvmtiTagHashmapEntry* entry = _table[h];
duke@435 344 JvmtiTagHashmapEntry* prev = NULL;
duke@435 345 while (entry != NULL) {
duke@435 346 oop orig_key = JNIHandles::resolve(entry->object());
duke@435 347 assert(orig_key != NULL, "jni weak reference cleared!!");
duke@435 348 if (key == orig_key) {
duke@435 349 break;
duke@435 350 }
duke@435 351 prev = entry;
duke@435 352 entry = entry->next();
duke@435 353 }
duke@435 354 if (entry != NULL) {
duke@435 355 remove(prev, h, entry);
duke@435 356 }
duke@435 357 return entry;
duke@435 358 }
duke@435 359
duke@435 360 // iterate over all entries in the hashmap
duke@435 361 void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
duke@435 362 };
duke@435 363
duke@435 364 // possible hashmap sizes - odd primes that roughly double in size.
duke@435 365 // To avoid excessive resizing the odd primes from 4801-76831 and
duke@435 366 // 76831-307261 have been removed. The list must be terminated by -1.
duke@435 367 int JvmtiTagHashmap::_sizes[] = { 4801, 76831, 307261, 614563, 1228891,
duke@435 368 2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 };
duke@435 369
duke@435 370
duke@435 371 // A supporting class for iterating over all entries in the hashmap
duke@435 372 class JvmtiTagHashmapEntryClosure {
duke@435 373 public:
duke@435 374 virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0;
duke@435 375 };
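
// A minimal closure sketch (hypothetical, not used by this file): counting
// the entries visited by entry_iterate() below.
//
//   class CountEntriesClosure : public JvmtiTagHashmapEntryClosure {
//    private:
//     int _count;
//    public:
//     CountEntriesClosure() : _count(0) { }
//     void do_entry(JvmtiTagHashmapEntry* entry) { _count++; }
//     int count() const { return _count; }
//   };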
duke@435 376
duke@435 377
duke@435 378 // iterate over all entries in the hashmap
duke@435 379 void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
duke@435 380 for (int i=0; i<_size; i++) {
duke@435 381 JvmtiTagHashmapEntry* entry = _table[i];
duke@435 382 JvmtiTagHashmapEntry* prev = NULL;
duke@435 383 while (entry != NULL) {
duke@435 384 // obtain the next entry before invoking do_entry - this is
duke@435 385 // necessary because do_entry may remove the entry from the
duke@435 386 // hashmap.
duke@435 387 JvmtiTagHashmapEntry* next = entry->next();
duke@435 388 closure->do_entry(entry);
duke@435 389 entry = next;
duke@435 390 }
duke@435 391 }
duke@435 392 }
duke@435 393
duke@435 394 // debugging
duke@435 395 void JvmtiTagHashmap::print_memory_usage() {
duke@435 396 intptr_t p = (intptr_t)this;
duke@435 397 tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p);
duke@435 398
duke@435 399 // table + entries in KB
duke@435 400 int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) +
duke@435 401 entry_count()*sizeof(JvmtiTagHashmapEntry))/K;
duke@435 402
duke@435 403 int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K);
duke@435 404 tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]",
duke@435 405 entry_count(), hashmap_usage, weak_globals_usage);
duke@435 406 }
duke@435 407
duke@435 408 // compute threshold for the next trace message
duke@435 409 void JvmtiTagHashmap::compute_next_trace_threshold() {
duke@435 410 if (trace_threshold() < medium_trace_threshold) {
duke@435 411 _trace_threshold += small_trace_threshold;
duke@435 412 } else {
duke@435 413 if (trace_threshold() < large_trace_threshold) {
duke@435 414 _trace_threshold += medium_trace_threshold;
duke@435 415 } else {
duke@435 416 _trace_threshold += large_trace_threshold;
duke@435 417 }
duke@435 418 }
duke@435 419 }
duke@435 420
duke@435 421 // memory region for young generation
duke@435 422 MemRegion JvmtiTagMap::_young_gen;
duke@435 423
duke@435 424 // get the memory region used for the young generation
duke@435 425 void JvmtiTagMap::get_young_generation() {
ysr@777 426 CollectedHeap* ch = Universe::heap();
ysr@777 427 switch (ch->kind()) {
ysr@777 428 case (CollectedHeap::GenCollectedHeap): {
ysr@777 429 _young_gen = ((GenCollectedHeap*)ch)->get_gen(0)->reserved();
ysr@777 430 break;
ysr@777 431 }
duke@435 432 #ifndef SERIALGC
ysr@777 433 case (CollectedHeap::ParallelScavengeHeap): {
ysr@777 434 _young_gen = ((ParallelScavengeHeap*)ch)->young_gen()->reserved();
ysr@777 435 break;
ysr@777 436 }
ysr@777 437 case (CollectedHeap::G1CollectedHeap): {
ysr@777 438 // Until a more satisfactory solution is implemented, all
ysr@777 439 // oops in the tag map will require rehashing at each gc.
ysr@777 440 // This is a correct, if extremely inefficient, solution.
ysr@777 441 // See RFE 6621729 for related commentary.
ysr@777 442 _young_gen = ch->reserved_region();
ysr@777 443 break;
ysr@777 444 }
ysr@777 445 #endif // !SERIALGC
ysr@777 446 default:
ysr@777 447 ShouldNotReachHere();
duke@435 448 }
duke@435 449 }
duke@435 450
duke@435 451 // returns true if oop is in the young generation
duke@435 452 inline bool JvmtiTagMap::is_in_young(oop o) {
duke@435 453 assert(_young_gen.start() != NULL, "checking");
duke@435 454 void* p = (void*)o;
duke@435 455 bool in_young = _young_gen.contains(p);
duke@435 456 return in_young;
duke@435 457 }
duke@435 458
duke@435 459 // returns the appropriate hashmap for a given object
duke@435 460 inline JvmtiTagHashmap* JvmtiTagMap::hashmap_for(oop o) {
duke@435 461 if (is_in_young(o)) {
duke@435 462 return _hashmap[0];
duke@435 463 } else {
duke@435 464 return _hashmap[1];
duke@435 465 }
duke@435 466 }
duke@435 467
duke@435 468
duke@435 469 // create a JvmtiTagMap
duke@435 470 JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
duke@435 471 _env(env),
duke@435 472 _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false),
duke@435 473 _free_entries(NULL),
duke@435 474 _free_entries_count(0)
duke@435 475 {
duke@435 476 assert(JvmtiThreadState_lock->is_locked(), "sanity check");
duke@435 477 assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");
duke@435 478
duke@435 479 // create the hashmaps
duke@435 480 for (int i=0; i<n_hashmaps; i++) {
duke@435 481 _hashmap[i] = new JvmtiTagHashmap();
duke@435 482 }
duke@435 483
duke@435 484 // get the memory region used by the young generation
duke@435 485 get_young_generation();
duke@435 486
duke@435 487 // finally add us to the environment
duke@435 488 ((JvmtiEnvBase *)env)->set_tag_map(this);
duke@435 489 }
duke@435 490
duke@435 491
duke@435 492 // destroy a JvmtiTagMap
duke@435 493 JvmtiTagMap::~JvmtiTagMap() {
duke@435 494
duke@435 495 // no lock acquired as we assume the enclosing environment is
duke@435 496 // also being destroyed.
duke@435 497 ((JvmtiEnvBase *)_env)->set_tag_map(NULL);
duke@435 498
duke@435 499 // iterate over the hashmaps and destroy each of the entries
duke@435 500 for (int i=0; i<n_hashmaps; i++) {
duke@435 501 JvmtiTagHashmap* hashmap = _hashmap[i];
duke@435 502 JvmtiTagHashmapEntry** table = hashmap->table();
duke@435 503 for (int j=0; j<hashmap->size(); j++) {
duke@435 504 JvmtiTagHashmapEntry *entry = table[j];
duke@435 505 while (entry != NULL) {
duke@435 506 JvmtiTagHashmapEntry* next = entry->next();
duke@435 507 jweak ref = entry->object();
duke@435 508 JNIHandles::destroy_weak_global(ref);
duke@435 509 delete entry;
duke@435 510 entry = next;
duke@435 511 }
duke@435 512 }
duke@435 513
duke@435 514 // finally destroy the hashmap
duke@435 515 delete hashmap;
duke@435 516 }
duke@435 517
duke@435 518 // remove any entries on the free list
duke@435 519 JvmtiTagHashmapEntry* entry = _free_entries;
duke@435 520 while (entry != NULL) {
duke@435 521 JvmtiTagHashmapEntry* next = entry->next();
duke@435 522 delete entry;
duke@435 523 entry = next;
duke@435 524 }
duke@435 525 }
duke@435 526
duke@435 527 // create a hashmap entry
duke@435 528 // - if there's an entry on the (per-environment) free list then this
duke@435 529 // is returned. Otherwise a new entry is allocated.
duke@435 530 JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(jweak ref, jlong tag) {
duke@435 531 assert(Thread::current()->is_VM_thread() || is_locked(), "checking");
duke@435 532 JvmtiTagHashmapEntry* entry;
duke@435 533 if (_free_entries == NULL) {
duke@435 534 entry = new JvmtiTagHashmapEntry(ref, tag);
duke@435 535 } else {
duke@435 536 assert(_free_entries_count > 0, "mismatched _free_entries_count");
duke@435 537 _free_entries_count--;
duke@435 538 entry = _free_entries;
duke@435 539 _free_entries = entry->next();
duke@435 540 entry->init(ref, tag);
duke@435 541 }
duke@435 542 return entry;
duke@435 543 }
duke@435 544
duke@435 545 // destroy an entry by returning it to the free list
duke@435 546 void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
duke@435 547 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
duke@435 548 // limit the size of the free list
duke@435 549 if (_free_entries_count >= max_free_entries) {
duke@435 550 delete entry;
duke@435 551 } else {
duke@435 552 entry->set_next(_free_entries);
duke@435 553 _free_entries = entry;
duke@435 554 _free_entries_count++;
duke@435 555 }
duke@435 556 }
duke@435 557
duke@435 558 // returns the tag map for the given environment. If the tag map
duke@435 559 // doesn't exist then it is created.
duke@435 560 JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
duke@435 561 JvmtiTagMap* tag_map = ((JvmtiEnvBase *)env)->tag_map();
duke@435 562 if (tag_map == NULL) {
duke@435 563 MutexLocker mu(JvmtiThreadState_lock);
duke@435 564 tag_map = ((JvmtiEnvBase *)env)->tag_map();
duke@435 565 if (tag_map == NULL) {
duke@435 566 tag_map = new JvmtiTagMap(env);
duke@435 567 }
duke@435 568 } else {
duke@435 569 CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
duke@435 570 }
duke@435 571 return tag_map;
duke@435 572 }
duke@435 573
duke@435 574 // iterate over all entries in the tag map.
duke@435 575 void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
duke@435 576 for (int i=0; i<n_hashmaps; i++) {
duke@435 577 JvmtiTagHashmap* hashmap = _hashmap[i];
duke@435 578 hashmap->entry_iterate(closure);
duke@435 579 }
duke@435 580 }
duke@435 581
duke@435 582 // returns true if the hashmaps are empty
duke@435 583 bool JvmtiTagMap::is_empty() {
duke@435 584 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
duke@435 585 assert(n_hashmaps == 2, "not implemented");
duke@435 586 return ((_hashmap[0]->entry_count() == 0) && (_hashmap[1]->entry_count() == 0));
duke@435 587 }
duke@435 588
duke@435 589
duke@435 590 // Return the tag value for an object, or 0 if the object is
duke@435 591 // not tagged
duke@435 592 //
duke@435 593 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
duke@435 594 JvmtiTagHashmapEntry* entry = tag_map->hashmap_for(o)->find(o);
duke@435 595 if (entry == NULL) {
duke@435 596 return 0;
duke@435 597 } else {
duke@435 598 return entry->tag();
duke@435 599 }
duke@435 600 }
duke@435 601
duke@435 602 // If the object is a java.lang.Class then return the klassOop,
duke@435 603 // otherwise return the original object
duke@435 604 static inline oop klassOop_if_java_lang_Class(oop o) {
never@1577 605 if (o->klass() == SystemDictionary::Class_klass()) {
duke@435 606 if (!java_lang_Class::is_primitive(o)) {
duke@435 607 o = (oop)java_lang_Class::as_klassOop(o);
duke@435 608 assert(o != NULL, "class for non-primitive mirror must exist");
duke@435 609 }
duke@435 610 }
duke@435 611 return o;
duke@435 612 }
duke@435 613
duke@435 614 // A CallbackWrapper is a support class for querying and tagging an object
duke@435 615 // around a callback to a profiler. The constructor does pre-callback
duke@435 616 // work to get the tag value, klass tag value, ... and the destructor
duke@435 617 // does the post-callback work of tagging or untagging the object.
duke@435 618 //
duke@435 619 // {
duke@435 620 // CallbackWrapper wrapper(tag_map, o);
duke@435 621 //
duke@435 622 // (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
duke@435 623 //
duke@435 624 // } // wrapper goes out of scope here which results in the destructor
duke@435 625 // checking to see if the object has been tagged, untagged, or the
duke@435 626 // tag value has changed.
duke@435 627 //
duke@435 628 class CallbackWrapper : public StackObj {
duke@435 629 private:
duke@435 630 JvmtiTagMap* _tag_map;
duke@435 631 JvmtiTagHashmap* _hashmap;
duke@435 632 JvmtiTagHashmapEntry* _entry;
duke@435 633 oop _o;
duke@435 634 jlong _obj_size;
duke@435 635 jlong _obj_tag;
duke@435 636 klassOop _klass; // the object's class
duke@435 637 jlong _klass_tag;
duke@435 638
duke@435 639 protected:
duke@435 640 JvmtiTagMap* tag_map() const { return _tag_map; }
duke@435 641
duke@435 642 // invoked post-callback to tag, untag, or update the tag of an object
duke@435 643 void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
duke@435 644 JvmtiTagHashmapEntry* entry, jlong obj_tag);
duke@435 645 public:
duke@435 646 CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
duke@435 647 assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
duke@435 648 "MT unsafe or must be VM thread");
duke@435 649
duke@435 650 // for Classes the klassOop is tagged
duke@435 651 _o = klassOop_if_java_lang_Class(o);
duke@435 652
duke@435 653 // object size
duke@435 654 _obj_size = _o->size() * wordSize;
duke@435 655
duke@435 656 // record the context
duke@435 657 _tag_map = tag_map;
duke@435 658 _hashmap = tag_map->hashmap_for(_o);
duke@435 659 _entry = _hashmap->find(_o);
duke@435 660
duke@435 661 // get object tag
duke@435 662 _obj_tag = (_entry == NULL) ? 0 : _entry->tag();
duke@435 663
duke@435 664 // get the class and the class's tag value
duke@435 665 if (_o == o) {
duke@435 666 _klass = _o->klass();
duke@435 667 } else {
duke@435 668 // if the object represents a runtime class then use the
duke@435 669 // tag for java.lang.Class
never@1577 670 _klass = SystemDictionary::Class_klass();
duke@435 671 }
duke@435 672 _klass_tag = tag_for(tag_map, _klass);
duke@435 673 }
duke@435 674
duke@435 675 ~CallbackWrapper() {
duke@435 676 post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
duke@435 677 }
duke@435 678
duke@435 679 inline jlong* obj_tag_p() { return &_obj_tag; }
duke@435 680 inline jlong obj_size() const { return _obj_size; }
duke@435 681 inline jlong obj_tag() const { return _obj_tag; }
duke@435 682 inline klassOop klass() const { return _klass; }
duke@435 683 inline jlong klass_tag() const { return _klass_tag; }
duke@435 684 };
duke@435 685
duke@435 686
duke@435 687
duke@435 688 // invoked post-callback to tag, untag, or update the tag of an object
duke@435 689 void inline CallbackWrapper::post_callback_tag_update(oop o,
duke@435 690 JvmtiTagHashmap* hashmap,
duke@435 691 JvmtiTagHashmapEntry* entry,
duke@435 692 jlong obj_tag) {
duke@435 693 if (entry == NULL) {
duke@435 694 if (obj_tag != 0) {
duke@435 695 // callback has tagged the object
duke@435 696 assert(Thread::current()->is_VM_thread(), "must be VMThread");
duke@435 697 HandleMark hm;
duke@435 698 Handle h(o);
duke@435 699 jweak ref = JNIHandles::make_weak_global(h);
duke@435 700 entry = tag_map()->create_entry(ref, obj_tag);
duke@435 701 hashmap->add(o, entry);
duke@435 702 }
duke@435 703 } else {
duke@435 704 // object was previously tagged - the callback may have untagged
duke@435 705 // the object or changed the tag value
duke@435 706 if (obj_tag == 0) {
duke@435 707 jweak ref = entry->object();
duke@435 708
duke@435 709 JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
duke@435 710 assert(entry_removed == entry, "checking");
duke@435 711 tag_map()->destroy_entry(entry);
duke@435 712
duke@435 713 JNIHandles::destroy_weak_global(ref);
duke@435 714 } else {
duke@435 715 if (obj_tag != entry->tag()) {
duke@435 716 entry->set_tag(obj_tag);
duke@435 717 }
duke@435 718 }
duke@435 719 }
duke@435 720 }
duke@435 721
duke@435 722 // An extended CallbackWrapper used when reporting an object reference
duke@435 723 // to the agent.
duke@435 724 //
duke@435 725 // {
duke@435 726 // TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
duke@435 727 //
duke@435 728 // (*callback)(wrapper.klass_tag(),
duke@435 729 // wrapper.obj_size(),
duke@435 730 // wrapper.obj_tag_p()
duke@435 731 // wrapper.referrer_tag_p(), ...)
duke@435 732 //
duke@435 733 // } // wrapper goes out of scope here which results in the destructor
duke@435 734 // checking to see if the referrer object has been tagged, untagged,
duke@435 735 // or the tag value has changed.
duke@435 736 //
duke@435 737 class TwoOopCallbackWrapper : public CallbackWrapper {
duke@435 738 private:
duke@435 739 bool _is_reference_to_self;
duke@435 740 JvmtiTagHashmap* _referrer_hashmap;
duke@435 741 JvmtiTagHashmapEntry* _referrer_entry;
duke@435 742 oop _referrer;
duke@435 743 jlong _referrer_obj_tag;
duke@435 744 jlong _referrer_klass_tag;
duke@435 745 jlong* _referrer_tag_p;
duke@435 746
duke@435 747 bool is_reference_to_self() const { return _is_reference_to_self; }
duke@435 748
duke@435 749 public:
duke@435 750 TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
duke@435 751 CallbackWrapper(tag_map, o)
duke@435 752 {
duke@435 753 // self reference needs to be handled in a special way
duke@435 754 _is_reference_to_self = (referrer == o);
duke@435 755
duke@435 756 if (_is_reference_to_self) {
duke@435 757 _referrer_klass_tag = klass_tag();
duke@435 758 _referrer_tag_p = obj_tag_p();
duke@435 759 } else {
duke@435 760 // for Classes the klassOop is tagged
duke@435 761 _referrer = klassOop_if_java_lang_Class(referrer);
duke@435 762 // record the context
duke@435 763 _referrer_hashmap = tag_map->hashmap_for(_referrer);
duke@435 764 _referrer_entry = _referrer_hashmap->find(_referrer);
duke@435 765
duke@435 766 // get object tag
duke@435 767 _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
duke@435 768 _referrer_tag_p = &_referrer_obj_tag;
duke@435 769
duke@435 770 // get referrer class tag.
duke@435 771 klassOop k = (_referrer == referrer) ? // Check if referrer is a class...
duke@435 772 _referrer->klass() // No, just get its class
never@1577 773 : SystemDictionary::Class_klass(); // Yes, its class is Class
duke@435 774 _referrer_klass_tag = tag_for(tag_map, k);
duke@435 775 }
duke@435 776 }
duke@435 777
duke@435 778 ~TwoOopCallbackWrapper() {
duke@435 779 if (!is_reference_to_self()){
duke@435 780 post_callback_tag_update(_referrer,
duke@435 781 _referrer_hashmap,
duke@435 782 _referrer_entry,
duke@435 783 _referrer_obj_tag);
duke@435 784 }
duke@435 785 }
duke@435 786
duke@435 787 // address of referrer tag
duke@435 788 // (for a self reference this will return the same thing as obj_tag_p())
duke@435 789 inline jlong* referrer_tag_p() { return _referrer_tag_p; }
duke@435 790
duke@435 791 // referrer's class tag
duke@435 792 inline jlong referrer_klass_tag() { return _referrer_klass_tag; }
duke@435 793 };
duke@435 794
duke@435 795 // tag an object
duke@435 796 //
duke@435 797 // This function is performance critical. If many threads attempt to tag objects
duke@435 798 // around the same time then it's possible that the Mutex associated with the
duke@435 799 // tag map will be a hot lock. Eliminating this lock will not eliminate the issue
duke@435 800 // because creating a JNI weak reference requires acquiring a global lock also.
duke@435 801 void JvmtiTagMap::set_tag(jobject object, jlong tag) {
duke@435 802 MutexLocker ml(lock());
duke@435 803
duke@435 804 // resolve the object
duke@435 805 oop o = JNIHandles::resolve_non_null(object);
duke@435 806
duke@435 807 // for Classes we tag the klassOop
duke@435 808 o = klassOop_if_java_lang_Class(o);
duke@435 809
duke@435 810 // see if the object is already tagged
duke@435 811 JvmtiTagHashmap* hashmap = hashmap_for(o);
duke@435 812 JvmtiTagHashmapEntry* entry = hashmap->find(o);
duke@435 813
duke@435 814 // if the object is not already tagged then we tag it
duke@435 815 if (entry == NULL) {
duke@435 816 if (tag != 0) {
duke@435 817 HandleMark hm;
duke@435 818 Handle h(o);
duke@435 819 jweak ref = JNIHandles::make_weak_global(h);
duke@435 820
duke@435 821 // the object may have moved because make_weak_global may
duke@435 822 // have blocked - thus it is necessary to resolve the handle
duke@435 823 // and re-hash the object.
duke@435 824 o = h();
duke@435 825 entry = create_entry(ref, tag);
duke@435 826 hashmap_for(o)->add(o, entry);
duke@435 827 } else {
duke@435 828 // no-op
duke@435 829 }
duke@435 830 } else {
duke@435 831 // if the object is already tagged then we either update
duke@435 832 // the tag (if a new tag value has been provided)
duke@435 833 // or remove the object if the new tag value is 0.
duke@435 834 // Removing the object requires that we also delete the JNI
duke@435 835 // weak ref to the object.
duke@435 836 if (tag == 0) {
duke@435 837 jweak ref = entry->object();
duke@435 838 hashmap->remove(o);
duke@435 839 destroy_entry(entry);
duke@435 840 JNIHandles::destroy_weak_global(ref);
duke@435 841 } else {
duke@435 842 entry->set_tag(tag);
duke@435 843 }
duke@435 844 }
duke@435 845 }
duke@435 846
duke@435 847 // get the tag for an object
duke@435 848 jlong JvmtiTagMap::get_tag(jobject object) {
duke@435 849 MutexLocker ml(lock());
duke@435 850
duke@435 851 // resolve the object
duke@435 852 oop o = JNIHandles::resolve_non_null(object);
duke@435 853
duke@435 854 // for Classes get the tag from the klassOop
duke@435 855 return tag_for(this, klassOop_if_java_lang_Class(o));
duke@435 856 }
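
// For orientation: the agent-facing path into set_tag()/get_tag() above is the
// JVMTI SetTag/GetTag pair. A minimal agent-side sketch (C++ JVMTI interface,
// error handling omitted; 'jvmti' and 'obj' are assumed to be a valid
// jvmtiEnv* and a JNI reference):
//
//   jvmtiError err = jvmti->SetTag(obj, 42);   // ends up in JvmtiTagMap::set_tag
//   jlong tag = 0;
//   err = jvmti->GetTag(obj, &tag);            // ends up in JvmtiTagMap::get_tag
//   // tag == 42, unless the object was untagged in between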
duke@435 857
duke@435 858
duke@435 859 // Helper class used to describe the static or instance fields of a class.
duke@435 860 // For each field it holds the field index (as defined by the JVMTI specification),
duke@435 861 // the field type, and the offset.
duke@435 862
duke@435 863 class ClassFieldDescriptor: public CHeapObj {
duke@435 864 private:
duke@435 865 int _field_index;
duke@435 866 int _field_offset;
duke@435 867 char _field_type;
duke@435 868 public:
duke@435 869 ClassFieldDescriptor(int index, char type, int offset) :
duke@435 870 _field_index(index), _field_type(type), _field_offset(offset) {
duke@435 871 }
duke@435 872 int field_index() const { return _field_index; }
duke@435 873 char field_type() const { return _field_type; }
duke@435 874 int field_offset() const { return _field_offset; }
duke@435 875 };
duke@435 876
duke@435 877 class ClassFieldMap: public CHeapObj {
duke@435 878 private:
duke@435 879 enum {
duke@435 880 initial_field_count = 5
duke@435 881 };
duke@435 882
duke@435 883 // list of field descriptors
duke@435 884 GrowableArray<ClassFieldDescriptor*>* _fields;
duke@435 885
duke@435 886 // constructor
duke@435 887 ClassFieldMap();
duke@435 888
duke@435 889 // add a field
duke@435 890 void add(int index, char type, int offset);
duke@435 891
duke@435 892 // returns the field count for the given class
duke@435 893 static int compute_field_count(instanceKlassHandle ikh);
duke@435 894
duke@435 895 public:
duke@435 896 ~ClassFieldMap();
duke@435 897
duke@435 898 // access
duke@435 899 int field_count() { return _fields->length(); }
duke@435 900 ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }
duke@435 901
duke@435 902 // functions to create maps of static or instance fields
duke@435 903 static ClassFieldMap* create_map_of_static_fields(klassOop k);
duke@435 904 static ClassFieldMap* create_map_of_instance_fields(oop obj);
duke@435 905 };
duke@435 906
duke@435 907 ClassFieldMap::ClassFieldMap() {
duke@435 908 _fields = new (ResourceObj::C_HEAP) GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
duke@435 909 }
duke@435 910
duke@435 911 ClassFieldMap::~ClassFieldMap() {
duke@435 912 for (int i=0; i<_fields->length(); i++) {
duke@435 913 delete _fields->at(i);
duke@435 914 }
duke@435 915 delete _fields;
duke@435 916 }
duke@435 917
duke@435 918 void ClassFieldMap::add(int index, char type, int offset) {
duke@435 919 ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
duke@435 920 _fields->append(field);
duke@435 921 }
duke@435 922
duke@435 923 // Returns a heap allocated ClassFieldMap to describe the static fields
duke@435 924 // of the given class.
duke@435 925 //
duke@435 926 ClassFieldMap* ClassFieldMap::create_map_of_static_fields(klassOop k) {
duke@435 927 HandleMark hm;
duke@435 928 instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), k);
duke@435 929
duke@435 930 // create the field map
duke@435 931 ClassFieldMap* field_map = new ClassFieldMap();
duke@435 932
duke@435 933 FilteredFieldStream f(ikh, false, false);
duke@435 934 int max_field_index = f.field_count()-1;
duke@435 935
duke@435 936 int index = 0;
duke@435 937 for (FilteredFieldStream fld(ikh, true, true); !fld.eos(); fld.next(), index++) {
duke@435 938 // ignore instance fields
duke@435 939 if (!fld.access_flags().is_static()) {
duke@435 940 continue;
duke@435 941 }
duke@435 942 field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
duke@435 943 }
duke@435 944 return field_map;
duke@435 945 }
duke@435 946
duke@435 947 // Returns a heap allocated ClassFieldMap to describe the instance fields
duke@435 948 // of the given class. All instance fields are included (this means public
duke@435 949 // and private fields declared in superclasses and superinterfaces too).
duke@435 950 //
duke@435 951 ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
duke@435 952 HandleMark hm;
duke@435 953 instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), obj->klass());
duke@435 954
duke@435 955 // create the field map
duke@435 956 ClassFieldMap* field_map = new ClassFieldMap();
duke@435 957
duke@435 958 FilteredFieldStream f(ikh, false, false);
duke@435 959
duke@435 960 int max_field_index = f.field_count()-1;
duke@435 961
duke@435 962 int index = 0;
duke@435 963 for (FilteredFieldStream fld(ikh, false, false); !fld.eos(); fld.next(), index++) {
duke@435 964 // ignore static fields
duke@435 965 if (fld.access_flags().is_static()) {
duke@435 966 continue;
duke@435 967 }
duke@435 968 field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
duke@435 969 }
duke@435 970
duke@435 971 return field_map;
duke@435 972 }
duke@435 973
duke@435 974 // Helper class used to cache a ClassFieldMap for the instance fields of
duke@435 975 // a class. A JvmtiCachedClassFieldMap can be cached by an instanceKlass during
duke@435 976 // heap iteration to avoid creating a field map for each object in the heap
duke@435 977 // (only need to create the map when the first instance of a class is encountered).
duke@435 978 //
duke@435 979 class JvmtiCachedClassFieldMap : public CHeapObj {
duke@435 980 private:
duke@435 981 enum {
duke@435 982 initial_class_count = 200
duke@435 983 };
duke@435 984 ClassFieldMap* _field_map;
duke@435 985
duke@435 986 ClassFieldMap* field_map() const { return _field_map; }
duke@435 987
duke@435 988 JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
duke@435 989 ~JvmtiCachedClassFieldMap();
duke@435 990
duke@435 991 static GrowableArray<instanceKlass*>* _class_list;
duke@435 992 static void add_to_class_list(instanceKlass* ik);
duke@435 993
duke@435 994 public:
duke@435 995 // returns the field map for a given object (returning map cached
duke@435 996 // by instanceKlass if possible)
duke@435 997 static ClassFieldMap* get_map_of_instance_fields(oop obj);
duke@435 998
duke@435 999 // removes the field map from all instanceKlasses - should be
duke@435 1000 // called before VM operation completes
duke@435 1001 static void clear_cache();
duke@435 1002
duke@435 1003 // returns the number of ClassFieldMaps cached by instanceKlasses
duke@435 1004 static int cached_field_map_count();
duke@435 1005 };
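
// Typical lifetime during a heap walk (a sketch; the cache mark class is
// ClassFieldMapCacheMark, declared below):
//
//   {
//     ClassFieldMapCacheMark cm;                // enables/validates the cache
//     ClassFieldMap* map =
//       JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);
//     ... report primitive fields using 'map' ...
//   }                                           // cache cleared here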
duke@435 1006
duke@435 1007 GrowableArray<instanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
duke@435 1008
duke@435 1009 JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
duke@435 1010 _field_map = field_map;
duke@435 1011 }
duke@435 1012
duke@435 1013 JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
duke@435 1014 if (_field_map != NULL) {
duke@435 1015 delete _field_map;
duke@435 1016 }
duke@435 1017 }
duke@435 1018
duke@435 1019 // Marker class to ensure that the class field map cache is only used in a defined
duke@435 1020 // scope.
duke@435 1021 class ClassFieldMapCacheMark : public StackObj {
duke@435 1022 private:
duke@435 1023 static bool _is_active;
duke@435 1024 public:
duke@435 1025 ClassFieldMapCacheMark() {
duke@435 1026 assert(Thread::current()->is_VM_thread(), "must be VMThread");
duke@435 1027 assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
duke@435 1028 assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
duke@435 1029 _is_active = true;
duke@435 1030 }
duke@435 1031 ~ClassFieldMapCacheMark() {
duke@435 1032 JvmtiCachedClassFieldMap::clear_cache();
duke@435 1033 _is_active = false;
duke@435 1034 }
duke@435 1035 static bool is_active() { return _is_active; }
duke@435 1036 };
duke@435 1037
duke@435 1038 bool ClassFieldMapCacheMark::_is_active;
duke@435 1039
duke@435 1040
duke@435 1041 // record that the given instanceKlass is caching a field map
duke@435 1042 void JvmtiCachedClassFieldMap::add_to_class_list(instanceKlass* ik) {
duke@435 1043 if (_class_list == NULL) {
duke@435 1044 _class_list = new (ResourceObj::C_HEAP) GrowableArray<instanceKlass*>(initial_class_count, true);
duke@435 1045 }
duke@435 1046 _class_list->push(ik);
duke@435 1047 }
duke@435 1048
duke@435 1049 // returns the instance field map for the given object
duke@435 1050 // (returns field map cached by the instanceKlass if possible)
duke@435 1051 ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
duke@435 1052 assert(Thread::current()->is_VM_thread(), "must be VMThread");
duke@435 1053 assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");
duke@435 1054
duke@435 1055 klassOop k = obj->klass();
duke@435 1056 instanceKlass* ik = instanceKlass::cast(k);
duke@435 1057
duke@435 1058 // return cached map if possible
duke@435 1059 JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
duke@435 1060 if (cached_map != NULL) {
duke@435 1061 assert(cached_map->field_map() != NULL, "missing field list");
duke@435 1062 return cached_map->field_map();
duke@435 1063 } else {
duke@435 1064 ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
duke@435 1065 cached_map = new JvmtiCachedClassFieldMap(field_map);
duke@435 1066 ik->set_jvmti_cached_class_field_map(cached_map);
duke@435 1067 add_to_class_list(ik);
duke@435 1068 return field_map;
duke@435 1069 }
duke@435 1070 }
duke@435 1071
duke@435 1072 // remove the field maps cached by all instanceKlasses
duke@435 1073 void JvmtiCachedClassFieldMap::clear_cache() {
duke@435 1074 assert(Thread::current()->is_VM_thread(), "must be VMThread");
duke@435 1075 if (_class_list != NULL) {
duke@435 1076 for (int i = 0; i < _class_list->length(); i++) {
duke@435 1077 instanceKlass* ik = _class_list->at(i);
duke@435 1078 JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
duke@435 1079 assert(cached_map != NULL, "should not be NULL");
duke@435 1080 ik->set_jvmti_cached_class_field_map(NULL);
duke@435 1081 delete cached_map; // deletes the encapsulated field map
duke@435 1082 }
duke@435 1083 delete _class_list;
duke@435 1084 _class_list = NULL;
duke@435 1085 }
duke@435 1086 }
duke@435 1087
duke@435 1088 // returns the number of ClassFieldMaps cached by instanceKlasses
duke@435 1089 int JvmtiCachedClassFieldMap::cached_field_map_count() {
duke@435 1090 return (_class_list == NULL) ? 0 : _class_list->length();
duke@435 1091 }
duke@435 1092
duke@435 1093 // helper function to indicate if an object is filtered by its tag or class tag
duke@435 1094 static inline bool is_filtered_by_heap_filter(jlong obj_tag,
duke@435 1095 jlong klass_tag,
duke@435 1096 int heap_filter) {
duke@435 1097 // apply the heap filter
duke@435 1098 if (obj_tag != 0) {
duke@435 1099 // filter out tagged objects
duke@435 1100 if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
duke@435 1101 } else {
duke@435 1102 // filter out untagged objects
duke@435 1103 if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
duke@435 1104 }
duke@435 1105 if (klass_tag != 0) {
duke@435 1106 // filter out objects with tagged classes
duke@435 1107 if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
duke@435 1108 } else {
duke@435 1109 // filter out objects with untagged classes.
duke@435 1110 if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
duke@435 1111 }
duke@435 1112 return false;
duke@435 1113 }
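
// Example: an agent that only wants callbacks for untagged instances of
// tagged classes would pass
//   jint heap_filter = JVMTI_HEAP_FILTER_TAGGED | JVMTI_HEAP_FILTER_CLASS_UNTAGGED;
// so this helper returns true (the object is skipped) whenever obj_tag != 0
// or klass_tag == 0.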
duke@435 1114
duke@435 1115 // helper function to indicate if an object is filtered by a klass filter
duke@435 1116 static inline bool is_filtered_by_klass_filter(oop obj, KlassHandle klass_filter) {
duke@435 1117 if (!klass_filter.is_null()) {
duke@435 1118 if (obj->klass() != klass_filter()) {
duke@435 1119 return true;
duke@435 1120 }
duke@435 1121 }
duke@435 1122 return false;
duke@435 1123 }
duke@435 1124
duke@435 1125 // helper function to tell if a field is a primitive field or not
duke@435 1126 static inline bool is_primitive_field_type(char type) {
duke@435 1127 return (type != 'L' && type != '[');
duke@435 1128 }
duke@435 1129
duke@435 1130 // helper function to copy the value from location addr to jvalue.
duke@435 1131 static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
duke@435 1132 switch (value_type) {
duke@435 1133 case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
duke@435 1134 case JVMTI_PRIMITIVE_TYPE_BYTE : { v->b = *(jbyte*)addr; break; }
duke@435 1135 case JVMTI_PRIMITIVE_TYPE_CHAR : { v->c = *(jchar*)addr; break; }
duke@435 1136 case JVMTI_PRIMITIVE_TYPE_SHORT : { v->s = *(jshort*)addr; break; }
duke@435 1137 case JVMTI_PRIMITIVE_TYPE_INT : { v->i = *(jint*)addr; break; }
duke@435 1138 case JVMTI_PRIMITIVE_TYPE_LONG : { v->j = *(jlong*)addr; break; }
duke@435 1139 case JVMTI_PRIMITIVE_TYPE_FLOAT : { v->f = *(jfloat*)addr; break; }
duke@435 1140 case JVMTI_PRIMITIVE_TYPE_DOUBLE : { v->d = *(jdouble*)addr; break; }
duke@435 1141 default: ShouldNotReachHere();
duke@435 1142 }
duke@435 1143 }
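
// Usage sketch (mirrors the primitive field callbacks below; 'obj' and
// 'offset' are assumed to locate a jint instance field):
//
//   jvalue v;
//   copy_to_jvalue(&v, (address)obj + offset, JVMTI_PRIMITIVE_TYPE_INT);
//   // v.i now holds the field's value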
duke@435 1144
duke@435 1145 // helper function to invoke string primitive value callback
duke@435 1146 // returns visit control flags
duke@435 1147 static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
duke@435 1148 CallbackWrapper* wrapper,
duke@435 1149 oop str,
duke@435 1150 void* user_data)
duke@435 1151 {
never@1577 1152 assert(str->klass() == SystemDictionary::String_klass(), "not a string");
duke@435 1153
duke@435 1154 // get the string value and length
duke@435 1155 // (string value may be offset from the base)
duke@435 1156 int s_len = java_lang_String::length(str);
duke@435 1157 typeArrayOop s_value = java_lang_String::value(str);
duke@435 1158 int s_offset = java_lang_String::offset(str);
duke@435 1159 jchar* value;
duke@435 1160 if (s_len > 0) {
duke@435 1161 value = s_value->char_at_addr(s_offset);
duke@435 1162 } else {
duke@435 1163 value = (jchar*) s_value->base(T_CHAR);
duke@435 1164 }
duke@435 1165
duke@435 1166 // invoke the callback
duke@435 1167 return (*cb)(wrapper->klass_tag(),
duke@435 1168 wrapper->obj_size(),
duke@435 1169 wrapper->obj_tag_p(),
duke@435 1170 value,
duke@435 1171 (jint)s_len,
duke@435 1172 user_data);
duke@435 1173 }
duke@435 1174
duke@435 1175 // helper function to invoke array primitive value callback
duke@435 1176 // returns visit control flags
duke@435 1177 static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
duke@435 1178 CallbackWrapper* wrapper,
duke@435 1179 oop obj,
duke@435 1180 void* user_data)
duke@435 1181 {
duke@435 1182 assert(obj->is_typeArray(), "not a primitive array");
duke@435 1183
duke@435 1184 // get base address of first element
duke@435 1185 typeArrayOop array = typeArrayOop(obj);
duke@435 1186 BasicType type = typeArrayKlass::cast(array->klass())->element_type();
duke@435 1187 void* elements = array->base(type);
duke@435 1188
duke@435 1189 // jvmtiPrimitiveType is defined so this mapping is always correct
duke@435 1190 jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);
duke@435 1191
duke@435 1192 return (*cb)(wrapper->klass_tag(),
duke@435 1193 wrapper->obj_size(),
duke@435 1194 wrapper->obj_tag_p(),
duke@435 1195 (jint)array->length(),
duke@435 1196 elem_type,
duke@435 1197 elements,
duke@435 1198 user_data);
duke@435 1199 }
duke@435 1200
duke@435 1201 // helper function to invoke the primitive field callback for all static fields
duke@435 1202 // of a given class
duke@435 1203 static jint invoke_primitive_field_callback_for_static_fields
duke@435 1204 (CallbackWrapper* wrapper,
duke@435 1205 oop obj,
duke@435 1206 jvmtiPrimitiveFieldCallback cb,
duke@435 1207 void* user_data)
duke@435 1208 {
duke@435 1209 // for static fields only the index will be set
duke@435 1210 static jvmtiHeapReferenceInfo reference_info = { 0 };
duke@435 1211
never@1577 1212 assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
duke@435 1213 if (java_lang_Class::is_primitive(obj)) {
duke@435 1214 return 0;
duke@435 1215 }
duke@435 1216 klassOop k = java_lang_Class::as_klassOop(obj);
duke@435 1217 Klass* klass = k->klass_part();
duke@435 1218
duke@435 1219 // ignore classes for object and type arrays
duke@435 1220 if (!klass->oop_is_instance()) {
duke@435 1221 return 0;
duke@435 1222 }
duke@435 1223
duke@435 1224 // ignore classes which aren't linked yet
duke@435 1225 instanceKlass* ik = instanceKlass::cast(k);
duke@435 1226 if (!ik->is_linked()) {
duke@435 1227 return 0;
duke@435 1228 }
duke@435 1229
duke@435 1230 // get the field map
duke@435 1231 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(k);
duke@435 1232
duke@435 1233 // invoke the callback for each static primitive field
duke@435 1234 for (int i=0; i<field_map->field_count(); i++) {
duke@435 1235 ClassFieldDescriptor* field = field_map->field_at(i);
duke@435 1236
duke@435 1237 // ignore non-primitive fields
duke@435 1238 char type = field->field_type();
duke@435 1239 if (!is_primitive_field_type(type)) {
duke@435 1240 continue;
duke@435 1241 }
duke@435 1242 // one-to-one mapping
duke@435 1243 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
duke@435 1244
duke@435 1245 // get offset and field value
duke@435 1246 int offset = field->field_offset();
duke@435 1247 address addr = (address)k + offset;
duke@435 1248 jvalue value;
duke@435 1249 copy_to_jvalue(&value, addr, value_type);
duke@435 1250
duke@435 1251 // field index
duke@435 1252 reference_info.field.index = field->field_index();
duke@435 1253
duke@435 1254 // invoke the callback
duke@435 1255 jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
duke@435 1256 &reference_info,
duke@435 1257 wrapper->klass_tag(),
duke@435 1258 wrapper->obj_tag_p(),
duke@435 1259 value,
duke@435 1260 value_type,
duke@435 1261 user_data);
duke@435 1262 if (res & JVMTI_VISIT_ABORT) {
duke@435 1263 delete field_map;
duke@435 1264 return res;
duke@435 1265 }
duke@435 1266 }
duke@435 1267
duke@435 1268 delete field_map;
duke@435 1269 return 0;
duke@435 1270 }
duke@435 1271
duke@435 1272 // helper function to invoke the primitive field callback for all instance fields
duke@435 1273 // of a given object
duke@435 1274 static jint invoke_primitive_field_callback_for_instance_fields(
duke@435 1275 CallbackWrapper* wrapper,
duke@435 1276 oop obj,
duke@435 1277 jvmtiPrimitiveFieldCallback cb,
duke@435 1278 void* user_data)
duke@435 1279 {
duke@435 1280 // for instance fields only the index will be set
duke@435 1281 static jvmtiHeapReferenceInfo reference_info = { 0 };
duke@435 1282
duke@435 1283 // get the map of the instance fields
duke@435 1284 ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);
duke@435 1285
duke@435 1286 // invoke the callback for each instance primitive field
duke@435 1287 for (int i=0; i<fields->field_count(); i++) {
duke@435 1288 ClassFieldDescriptor* field = fields->field_at(i);
duke@435 1289
duke@435 1290 // ignore non-primitive fields
duke@435 1291 char type = field->field_type();
duke@435 1292 if (!is_primitive_field_type(type)) {
duke@435 1293 continue;
duke@435 1294 }
duke@435 1295 // one-to-one mapping
duke@435 1296 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
duke@435 1297
duke@435 1298 // get offset and field value
duke@435 1299 int offset = field->field_offset();
duke@435 1300 address addr = (address)obj + offset;
duke@435 1301 jvalue value;
duke@435 1302 copy_to_jvalue(&value, addr, value_type);
duke@435 1303
duke@435 1304 // field index
duke@435 1305 reference_info.field.index = field->field_index();
duke@435 1306
duke@435 1307 // invoke the callback
duke@435 1308 jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
duke@435 1309 &reference_info,
duke@435 1310 wrapper->klass_tag(),
duke@435 1311 wrapper->obj_tag_p(),
duke@435 1312 value,
duke@435 1313 value_type,
duke@435 1314 user_data);
duke@435 1315 if (res & JVMTI_VISIT_ABORT) {
duke@435 1316 return res;
duke@435 1317 }
duke@435 1318 }
duke@435 1319 return 0;
duke@435 1320 }
duke@435 1321
duke@435 1322
duke@435 1323 // VM operation to iterate over all objects in the heap (both reachable
duke@435 1324 // and unreachable)
duke@435 1325 class VM_HeapIterateOperation: public VM_Operation {
duke@435 1326 private:
duke@435 1327 ObjectClosure* _blk;
duke@435 1328 public:
duke@435 1329 VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }
duke@435 1330
duke@435 1331 VMOp_Type type() const { return VMOp_HeapIterateOperation; }
duke@435 1332 void doit() {
duke@435 1333 // allows class field maps to be cached during iteration
duke@435 1334 ClassFieldMapCacheMark cm;
duke@435 1335
duke@435 1336 // make sure that heap is parsable (fills TLABs with filler objects)
duke@435 1337 Universe::heap()->ensure_parsability(false); // no need to retire TLABs
duke@435 1338
duke@435 1339 // Verify heap before iteration - if the heap gets corrupted then
duke@435 1340 // JVMTI's IterateOverHeap will crash.
duke@435 1341 if (VerifyBeforeIteration) {
duke@435 1342 Universe::verify();
duke@435 1343 }
duke@435 1344
duke@435 1345 // do the iteration
jmasa@952 1346 // If this operation encounters a bad object when using CMS,
jmasa@952 1347 // consider using safe_object_iterate() which avoids perm gen
jmasa@952 1348 // objects that may contain bad references.
duke@435 1349 Universe::heap()->object_iterate(_blk);
duke@435 1350
duke@435 1351 // when sharing is enabled we must iterate over the shared spaces
duke@435 1352 if (UseSharedSpaces) {
duke@435 1353 GenCollectedHeap* gch = GenCollectedHeap::heap();
duke@435 1354 CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
duke@435 1355 gen->ro_space()->object_iterate(_blk);
duke@435 1356 gen->rw_space()->object_iterate(_blk);
duke@435 1357 }
duke@435 1358 }
duke@435 1359
duke@435 1360 };
duke@435 1361
duke@435 1362
duke@435 1363 // An ObjectClosure used to support the deprecated IterateOverHeap and
duke@435 1364 // IterateOverInstancesOfClass functions
duke@435 1365 class IterateOverHeapObjectClosure: public ObjectClosure {
duke@435 1366 private:
duke@435 1367 JvmtiTagMap* _tag_map;
duke@435 1368 KlassHandle _klass;
duke@435 1369 jvmtiHeapObjectFilter _object_filter;
duke@435 1370 jvmtiHeapObjectCallback _heap_object_callback;
duke@435 1371 const void* _user_data;
duke@435 1372
duke@435 1373 // accessors
duke@435 1374 JvmtiTagMap* tag_map() const { return _tag_map; }
duke@435 1375 jvmtiHeapObjectFilter object_filter() const { return _object_filter; }
duke@435 1376 jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
duke@435 1377 KlassHandle klass() const { return _klass; }
duke@435 1378 const void* user_data() const { return _user_data; }
duke@435 1379
duke@435 1380 // indicates if iteration has been aborted
duke@435 1381 bool _iteration_aborted;
duke@435 1382 bool is_iteration_aborted() const { return _iteration_aborted; }
duke@435 1383 void set_iteration_aborted(bool aborted) { _iteration_aborted = aborted; }
duke@435 1384
duke@435 1385 public:
duke@435 1386 IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
duke@435 1387 KlassHandle klass,
duke@435 1388 jvmtiHeapObjectFilter object_filter,
duke@435 1389 jvmtiHeapObjectCallback heap_object_callback,
duke@435 1390 const void* user_data) :
duke@435 1391 _tag_map(tag_map),
duke@435 1392 _klass(klass),
duke@435 1393 _object_filter(object_filter),
duke@435 1394 _heap_object_callback(heap_object_callback),
duke@435 1395 _user_data(user_data),
duke@435 1396 _iteration_aborted(false)
duke@435 1397 {
duke@435 1398 }
duke@435 1399
duke@435 1400 void do_object(oop o);
duke@435 1401 };
duke@435 1402
duke@435 1403 // invoked for each object in the heap
duke@435 1404 void IterateOverHeapObjectClosure::do_object(oop o) {
duke@435 1405 // check if iteration has been halted
duke@435 1406 if (is_iteration_aborted()) return;
duke@435 1407
duke@435 1408 // ignore any objects that aren't visible to profiler
duke@435 1409 if (!ServiceUtil::visible_oop(o)) return;
duke@435 1410
duke@435 1411 // instanceof check when filtering by klass
duke@435 1412 if (!klass().is_null() && !o->is_a(klass()())) {
duke@435 1413 return;
duke@435 1414 }
duke@435 1415 // prepare for the callback
duke@435 1416 CallbackWrapper wrapper(tag_map(), o);
duke@435 1417
duke@435 1418 // if the object is tagged and we're only interested in untagged objects
duke@435 1419 // then don't invoke the callback. Similarly, if the object is untagged
duke@435 1420 // and we're only interested in tagged objects we skip the callback.
duke@435 1421 if (wrapper.obj_tag() != 0) {
duke@435 1422 if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
duke@435 1423 } else {
duke@435 1424 if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
duke@435 1425 }
duke@435 1426
duke@435 1427 // invoke the agent's callback
duke@435 1428 jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
duke@435 1429 wrapper.obj_size(),
duke@435 1430 wrapper.obj_tag_p(),
duke@435 1431 (void*)user_data());
duke@435 1432 if (control == JVMTI_ITERATION_ABORT) {
duke@435 1433 set_iteration_aborted(true);
duke@435 1434 }
duke@435 1435 }
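// Illustrative sketch (not part of the VM): an agent callback of the
// jvmtiHeapObjectCallback shape that the closure above invokes through the
// deprecated IterateOverHeap. The function name, size threshold and tag value
// are made up for illustration.
//
//   static jvmtiIterationControl JNICALL tag_large_objects(jlong class_tag,
//                                                          jlong size,
//                                                          jlong* tag_ptr,
//                                                          void* user_data) {
//     if (size > 1024) {
//       *tag_ptr = 42;                   // tag objects larger than 1KB
//     }
//     return JVMTI_ITERATION_CONTINUE;   // JVMTI_ITERATION_ABORT halts the walk
//   }
//
//   // jvmti is the agent's jvmtiEnv*
//   jvmti->IterateOverHeap(JVMTI_HEAP_OBJECT_EITHER, tag_large_objects, NULL);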
duke@435 1436
duke@435 1437 // An ObjectClosure used to support the IterateThroughHeap function
duke@435 1438 class IterateThroughHeapObjectClosure: public ObjectClosure {
duke@435 1439 private:
duke@435 1440 JvmtiTagMap* _tag_map;
duke@435 1441 KlassHandle _klass;
duke@435 1442 int _heap_filter;
duke@435 1443 const jvmtiHeapCallbacks* _callbacks;
duke@435 1444 const void* _user_data;
duke@435 1445
duke@435 1446 // accessor functions
duke@435 1447 JvmtiTagMap* tag_map() const { return _tag_map; }
duke@435 1448 int heap_filter() const { return _heap_filter; }
duke@435 1449 const jvmtiHeapCallbacks* callbacks() const { return _callbacks; }
duke@435 1450 KlassHandle klass() const { return _klass; }
duke@435 1451 const void* user_data() const { return _user_data; }
duke@435 1452
duke@435 1453 // indicates if the iteration has been aborted
duke@435 1454 bool _iteration_aborted;
duke@435 1455 bool is_iteration_aborted() const { return _iteration_aborted; }
duke@435 1456
duke@435 1457 // used to check the visit control flags. If the abort flag is set
duke@435 1458 // then we set the iteration aborted flag so that the iteration completes
duke@435 1459 // without processing any further objects
duke@435 1460 bool check_flags_for_abort(jint flags) {
duke@435 1461 bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
duke@435 1462 if (is_abort) {
duke@435 1463 _iteration_aborted = true;
duke@435 1464 }
duke@435 1465 return is_abort;
duke@435 1466 }
duke@435 1467
duke@435 1468 public:
duke@435 1469 IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
duke@435 1470 KlassHandle klass,
duke@435 1471 int heap_filter,
duke@435 1472 const jvmtiHeapCallbacks* heap_callbacks,
duke@435 1473 const void* user_data) :
duke@435 1474 _tag_map(tag_map),
duke@435 1475 _klass(klass),
duke@435 1476 _heap_filter(heap_filter),
duke@435 1477 _callbacks(heap_callbacks),
duke@435 1478 _user_data(user_data),
duke@435 1479 _iteration_aborted(false)
duke@435 1480 {
duke@435 1481 }
duke@435 1482
duke@435 1483 void do_object(oop o);
duke@435 1484 };
duke@435 1485
duke@435 1486 // invoked for each object in the heap
duke@435 1487 void IterateThroughHeapObjectClosure::do_object(oop obj) {
duke@435 1488 // check if iteration has been halted
duke@435 1489 if (is_iteration_aborted()) return;
duke@435 1490
duke@435 1491 // ignore any objects that aren't visible to profiler
duke@435 1492 if (!ServiceUtil::visible_oop(obj)) return;
duke@435 1493
duke@435 1494 // apply class filter
duke@435 1495 if (is_filtered_by_klass_filter(obj, klass())) return;
duke@435 1496
duke@435 1497 // prepare for callback
duke@435 1498 CallbackWrapper wrapper(tag_map(), obj);
duke@435 1499
duke@435 1500 // check if filtered by the heap filter
duke@435 1501 if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
duke@435 1502 return;
duke@435 1503 }
duke@435 1504
duke@435 1505 // for arrays we need the length, otherwise -1
duke@435 1506 bool is_array = obj->is_array();
duke@435 1507 int len = is_array ? arrayOop(obj)->length() : -1;
duke@435 1508
duke@435 1509 // invoke the object callback (if callback is provided)
duke@435 1510 if (callbacks()->heap_iteration_callback != NULL) {
duke@435 1511 jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
duke@435 1512 jint res = (*cb)(wrapper.klass_tag(),
duke@435 1513 wrapper.obj_size(),
duke@435 1514 wrapper.obj_tag_p(),
duke@435 1515 (jint)len,
duke@435 1516 (void*)user_data());
duke@435 1517 if (check_flags_for_abort(res)) return;
duke@435 1518 }
duke@435 1519
duke@435 1520 // for objects and classes we report primitive fields if callback provided
duke@435 1521 if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
duke@435 1522 jint res;
duke@435 1523 jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
never@1577 1524 if (obj->klass() == SystemDictionary::Class_klass()) {
duke@435 1525 res = invoke_primitive_field_callback_for_static_fields(&wrapper,
duke@435 1526 obj,
duke@435 1527 cb,
duke@435 1528 (void*)user_data());
duke@435 1529 } else {
duke@435 1530 res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
duke@435 1531 obj,
duke@435 1532 cb,
duke@435 1533 (void*)user_data());
duke@435 1534 }
duke@435 1535 if (check_flags_for_abort(res)) return;
duke@435 1536 }
duke@435 1537
duke@435 1538 // string callback
duke@435 1539 if (!is_array &&
duke@435 1540 callbacks()->string_primitive_value_callback != NULL &&
never@1577 1541 obj->klass() == SystemDictionary::String_klass()) {
duke@435 1542 jint res = invoke_string_value_callback(
duke@435 1543 callbacks()->string_primitive_value_callback,
duke@435 1544 &wrapper,
duke@435 1545 obj,
duke@435 1546 (void*)user_data() );
duke@435 1547 if (check_flags_for_abort(res)) return;
duke@435 1548 }
duke@435 1549
duke@435 1550 // array callback
duke@435 1551 if (is_array &&
duke@435 1552 callbacks()->array_primitive_value_callback != NULL &&
duke@435 1553 obj->is_typeArray()) {
duke@435 1554 jint res = invoke_array_primitive_value_callback(
duke@435 1555 callbacks()->array_primitive_value_callback,
duke@435 1556 &wrapper,
duke@435 1557 obj,
duke@435 1558 (void*)user_data() );
duke@435 1559 if (check_flags_for_abort(res)) return;
duke@435 1560 }
duke@435 1561 }
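// Illustrative sketch (not part of the VM): how an agent typically reaches the
// closure above. The callback name and counting policy are hypothetical; the
// jvmtiHeapCallbacks fields and the IterateThroughHeap call are as specified
// by JVM TI.
//
//   static jint JNICALL count_objects(jlong class_tag, jlong size,
//                                     jlong* tag_ptr, jint length,
//                                     void* user_data) {
//     (*(jlong*)user_data)++;   // count every object reported
//     return 0;                 // returning JVMTI_VISIT_ABORT would stop here
//   }
//
//   jlong count = 0;
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_iteration_callback = &count_objects;
//   jvmti->IterateThroughHeap(0 /* no filter */, NULL /* all classes */,
//                             &callbacks, &count);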
duke@435 1562
duke@435 1563
duke@435 1564 // Deprecated function to iterate over all objects in the heap
duke@435 1565 void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
duke@435 1566 KlassHandle klass,
duke@435 1567 jvmtiHeapObjectCallback heap_object_callback,
duke@435 1568 const void* user_data)
duke@435 1569 {
duke@435 1570 MutexLocker ml(Heap_lock);
duke@435 1571 IterateOverHeapObjectClosure blk(this,
duke@435 1572 klass,
duke@435 1573 object_filter,
duke@435 1574 heap_object_callback,
duke@435 1575 user_data);
duke@435 1576 VM_HeapIterateOperation op(&blk);
duke@435 1577 VMThread::execute(&op);
duke@435 1578 }
duke@435 1579
duke@435 1580
duke@435 1581 // Iterates over all objects in the heap
duke@435 1582 void JvmtiTagMap::iterate_through_heap(jint heap_filter,
duke@435 1583 KlassHandle klass,
duke@435 1584 const jvmtiHeapCallbacks* callbacks,
duke@435 1585 const void* user_data)
duke@435 1586 {
duke@435 1587 MutexLocker ml(Heap_lock);
duke@435 1588 IterateThroughHeapObjectClosure blk(this,
duke@435 1589 klass,
duke@435 1590 heap_filter,
duke@435 1591 callbacks,
duke@435 1592 user_data);
duke@435 1593 VM_HeapIterateOperation op(&blk);
duke@435 1594 VMThread::execute(&op);
duke@435 1595 }
duke@435 1596
duke@435 1597 // support class for get_objects_with_tags
duke@435 1598
duke@435 1599 class TagObjectCollector : public JvmtiTagHashmapEntryClosure {
duke@435 1600 private:
duke@435 1601 JvmtiEnv* _env;
duke@435 1602 jlong* _tags;
duke@435 1603 jint _tag_count;
duke@435 1604
duke@435 1605 GrowableArray<jobject>* _object_results; // collected objects (JNI weak refs)
duke@435 1606 GrowableArray<uint64_t>* _tag_results; // collected tags
duke@435 1607
duke@435 1608 public:
duke@435 1609 TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) {
duke@435 1610 _env = env;
duke@435 1611 _tags = (jlong*)tags;
duke@435 1612 _tag_count = tag_count;
duke@435 1613 _object_results = new (ResourceObj::C_HEAP) GrowableArray<jobject>(1,true);
duke@435 1614 _tag_results = new (ResourceObj::C_HEAP) GrowableArray<uint64_t>(1,true);
duke@435 1615 }
duke@435 1616
duke@435 1617 ~TagObjectCollector() {
duke@435 1618 delete _object_results;
duke@435 1619 delete _tag_results;
duke@435 1620 }
duke@435 1621
duke@435 1622 // for each tagged object check if the tag value matches
duke@435 1623 // - if it matches then we create a JNI local reference to the object
duke@435 1624 // and record the reference and tag value.
duke@435 1625 //
duke@435 1626 void do_entry(JvmtiTagHashmapEntry* entry) {
duke@435 1627 for (int i=0; i<_tag_count; i++) {
duke@435 1628 if (_tags[i] == entry->tag()) {
duke@435 1629 oop o = JNIHandles::resolve(entry->object());
duke@435 1630 assert(o != NULL && o != JNIHandles::deleted_handle(), "sanity check");
duke@435 1631
duke@435 1632 // the mirror is tagged
duke@435 1633 if (o->is_klass()) {
duke@435 1634 klassOop k = (klassOop)o;
duke@435 1635 o = Klass::cast(k)->java_mirror();
duke@435 1636 }
duke@435 1637
duke@435 1638 jobject ref = JNIHandles::make_local(JavaThread::current(), o);
duke@435 1639 _object_results->append(ref);
duke@435 1640 _tag_results->append((uint64_t)entry->tag());
duke@435 1641 }
duke@435 1642 }
duke@435 1643 }
duke@435 1644
duke@435 1645 // return the results from the collection
duke@435 1646 //
duke@435 1647 jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
duke@435 1648 jvmtiError error;
duke@435 1649 int count = _object_results->length();
duke@435 1650 assert(count >= 0, "sanity check");
duke@435 1651
duke@435 1652 // if object_result_ptr is not NULL then allocate the result and copy
duke@435 1653 // in the object references.
duke@435 1654 if (object_result_ptr != NULL) {
duke@435 1655 error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
duke@435 1656 if (error != JVMTI_ERROR_NONE) {
duke@435 1657 return error;
duke@435 1658 }
duke@435 1659 for (int i=0; i<count; i++) {
duke@435 1660 (*object_result_ptr)[i] = _object_results->at(i);
duke@435 1661 }
duke@435 1662 }
duke@435 1663
duke@435 1664 // if tag_result_ptr is not NULL then allocate the result and copy
duke@435 1665 // in the tag values.
duke@435 1666 if (tag_result_ptr != NULL) {
duke@435 1667 error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
duke@435 1668 if (error != JVMTI_ERROR_NONE) {
duke@435 1669 if (object_result_ptr != NULL) {
duke@435 1670 _env->Deallocate((unsigned char*)*object_result_ptr); // free the object array allocated above
duke@435 1671 }
duke@435 1672 return error;
duke@435 1673 }
duke@435 1674 for (int i=0; i<count; i++) {
duke@435 1675 (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
duke@435 1676 }
duke@435 1677 }
duke@435 1678
duke@435 1679 *count_ptr = count;
duke@435 1680 return JVMTI_ERROR_NONE;
duke@435 1681 }
duke@435 1682 };
duke@435 1683
duke@435 1684 // return the list of objects with the specified tags
duke@435 1685 jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
duke@435 1686 jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
duke@435 1687
duke@435 1688 TagObjectCollector collector(env(), tags, count);
duke@435 1689 {
duke@435 1690 // iterate over all tagged objects
duke@435 1691 MutexLocker ml(lock());
duke@435 1692 entry_iterate(&collector);
duke@435 1693 }
duke@435 1694 return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
duke@435 1695 }
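// Illustrative sketch (not part of the VM): the agent-side call that ends up
// in get_objects_with_tags() above. The tag values are hypothetical; the
// returned arrays are JVM TI allocated and the objects are JNI local refs, so
// the agent is expected to Deallocate (and DeleteLocalRef) them when done.
//
//   jlong tags[2] = { 1, 42 };
//   jint count = 0;
//   jobject* objects = NULL;
//   jlong* found_tags = NULL;
//   jvmtiError err = jvmti->GetObjectsWithTags(2, tags, &count,
//                                              &objects, &found_tags);
//   if (err == JVMTI_ERROR_NONE) {
//     // ... use objects[0..count-1] and found_tags[0..count-1] ...
//     jvmti->Deallocate((unsigned char*)objects);
//     jvmti->Deallocate((unsigned char*)found_tags);
//   }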
duke@435 1696
duke@435 1697
duke@435 1698 // ObjectMarker is used to support the marking objects when walking the
duke@435 1699 // heap.
duke@435 1700 //
duke@435 1701 // This implementation uses the existing mark bits in an object for
duke@435 1702 // marking. Objects that are marked must later have their headers restored.
duke@435 1703 // As most objects are unlocked and don't have their identity hash computed
duke@435 1704 // we don't have to save their headers. Instead we save the headers that
duke@435 1705 // are "interesting". Later when the headers are restored this implementation
duke@435 1706 // restores all headers to their initial value and then restores the few
duke@435 1707 // objects that had interesting headers.
duke@435 1708 //
duke@435 1709 // Future work: This implementation currently uses growable arrays to save
duke@435 1710 // the oop and header of interesting objects. As an optimization we could
duke@435 1711 // use the same technique as the GC and make use of the unused area
duke@435 1712 // between top() and end().
duke@435 1713 //
duke@435 1714
duke@435 1715 // An ObjectClosure used to restore the mark bits of an object
duke@435 1716 class RestoreMarksClosure : public ObjectClosure {
duke@435 1717 public:
duke@435 1718 void do_object(oop o) {
duke@435 1719 if (o != NULL) {
duke@435 1720 markOop mark = o->mark();
duke@435 1721 if (mark->is_marked()) {
duke@435 1722 o->init_mark();
duke@435 1723 }
duke@435 1724 }
duke@435 1725 }
duke@435 1726 };
duke@435 1727
duke@435 1728 // ObjectMarker provides the mark and visited functions
duke@435 1729 class ObjectMarker : AllStatic {
duke@435 1730 private:
duke@435 1731 // saved headers
duke@435 1732 static GrowableArray<oop>* _saved_oop_stack;
duke@435 1733 static GrowableArray<markOop>* _saved_mark_stack;
duke@435 1734
duke@435 1735 public:
duke@435 1736 static void init(); // initialize
duke@435 1737 static void done(); // clean-up
duke@435 1738
duke@435 1739 static inline void mark(oop o); // mark an object
duke@435 1740 static inline bool visited(oop o); // check if object has been visited
duke@435 1741 };
duke@435 1742
duke@435 1743 GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
duke@435 1744 GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
duke@435 1745
duke@435 1746 // initialize ObjectMarker - prepares for object marking
duke@435 1747 void ObjectMarker::init() {
duke@435 1748 assert(Thread::current()->is_VM_thread(), "must be VMThread");
duke@435 1749
duke@435 1750 // prepare heap for iteration
duke@435 1751 Universe::heap()->ensure_parsability(false); // no need to retire TLABs
duke@435 1752
duke@435 1753 // create stacks for interesting headers
duke@435 1754 _saved_mark_stack = new (ResourceObj::C_HEAP) GrowableArray<markOop>(4000, true);
duke@435 1755 _saved_oop_stack = new (ResourceObj::C_HEAP) GrowableArray<oop>(4000, true);
duke@435 1756
duke@435 1757 if (UseBiasedLocking) {
duke@435 1758 BiasedLocking::preserve_marks();
duke@435 1759 }
duke@435 1760 }
duke@435 1761
duke@435 1762 // Object marking is done so restore object headers
duke@435 1763 void ObjectMarker::done() {
duke@435 1764 // iterate over all objects and restore the mark bits to
duke@435 1765 // their initial value
duke@435 1766 RestoreMarksClosure blk;
duke@435 1767 Universe::heap()->object_iterate(&blk);
duke@435 1768
duke@435 1769 // When sharing is enabled we need to restore the headers of the objects
duke@435 1770 // in the readwrite space too.
duke@435 1771 if (UseSharedSpaces) {
duke@435 1772 GenCollectedHeap* gch = GenCollectedHeap::heap();
duke@435 1773 CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
duke@435 1774 gen->rw_space()->object_iterate(&blk);
duke@435 1775 }
duke@435 1776
duke@435 1777 // now restore the interesting headers
duke@435 1778 for (int i = 0; i < _saved_oop_stack->length(); i++) {
duke@435 1779 oop o = _saved_oop_stack->at(i);
duke@435 1780 markOop mark = _saved_mark_stack->at(i);
duke@435 1781 o->set_mark(mark);
duke@435 1782 }
duke@435 1783
duke@435 1784 if (UseBiasedLocking) {
duke@435 1785 BiasedLocking::restore_marks();
duke@435 1786 }
duke@435 1787
duke@435 1788 // free the stacks
duke@435 1789 delete _saved_oop_stack;
duke@435 1790 delete _saved_mark_stack;
duke@435 1791 }
duke@435 1792
duke@435 1793 // mark an object
duke@435 1794 inline void ObjectMarker::mark(oop o) {
duke@435 1795 assert(Universe::heap()->is_in(o), "sanity check");
duke@435 1796 assert(!o->mark()->is_marked(), "should only mark an object once");
duke@435 1797
duke@435 1798 // object's mark word
duke@435 1799 markOop mark = o->mark();
duke@435 1800
duke@435 1801 if (mark->must_be_preserved(o)) {
duke@435 1802 _saved_mark_stack->push(mark);
duke@435 1803 _saved_oop_stack->push(o);
duke@435 1804 }
duke@435 1805
duke@435 1806 // mark the object
duke@435 1807 o->set_mark(markOopDesc::prototype()->set_marked());
duke@435 1808 }
duke@435 1809
duke@435 1810 // return true if object is marked
duke@435 1811 inline bool ObjectMarker::visited(oop o) {
duke@435 1812 return o->mark()->is_marked();
duke@435 1813 }
duke@435 1814
duke@435 1815 // Stack allocated class to help ensure that ObjectMarker is used
duke@435 1816 // correctly. Constructor initializes ObjectMarker, destructor calls
duke@435 1817 // ObjectMarker's done() function to restore object headers.
duke@435 1818 class ObjectMarkerController : public StackObj {
duke@435 1819 public:
duke@435 1820 ObjectMarkerController() {
duke@435 1821 ObjectMarker::init();
duke@435 1822 }
duke@435 1823 ~ObjectMarkerController() {
duke@435 1824 ObjectMarker::done();
duke@435 1825 }
duke@435 1826 };
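// Illustrative sketch of the intended use of the RAII guard above (this is the
// pattern the heap walking code later in this file follows):
//
//   {
//     ObjectMarkerController marker;   // ObjectMarker::init() runs here
//     // ... walk the heap, using ObjectMarker::mark()/visited() ...
//   }                                  // ObjectMarker::done() restores headers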
duke@435 1827
duke@435 1828
duke@435 1829 // helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
duke@435 1830 // (not performance critical as only used for roots)
duke@435 1831 static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
duke@435 1832 switch (kind) {
duke@435 1833 case JVMTI_HEAP_REFERENCE_JNI_GLOBAL: return JVMTI_HEAP_ROOT_JNI_GLOBAL;
duke@435 1834 case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
duke@435 1835 case JVMTI_HEAP_REFERENCE_MONITOR: return JVMTI_HEAP_ROOT_MONITOR;
duke@435 1836 case JVMTI_HEAP_REFERENCE_STACK_LOCAL: return JVMTI_HEAP_ROOT_STACK_LOCAL;
duke@435 1837 case JVMTI_HEAP_REFERENCE_JNI_LOCAL: return JVMTI_HEAP_ROOT_JNI_LOCAL;
duke@435 1838 case JVMTI_HEAP_REFERENCE_THREAD: return JVMTI_HEAP_ROOT_THREAD;
duke@435 1839 case JVMTI_HEAP_REFERENCE_OTHER: return JVMTI_HEAP_ROOT_OTHER;
duke@435 1840 default: ShouldNotReachHere(); return JVMTI_HEAP_ROOT_OTHER;
duke@435 1841 }
duke@435 1842 }
duke@435 1843
duke@435 1844 // Base class for all heap walk contexts. The base class maintains a flag
duke@435 1845 // to indicate if the context is valid or not.
duke@435 1846 class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
duke@435 1847 private:
duke@435 1848 bool _valid;
duke@435 1849 public:
duke@435 1850 HeapWalkContext(bool valid) { _valid = valid; }
duke@435 1851 void invalidate() { _valid = false; }
duke@435 1852 bool is_valid() const { return _valid; }
duke@435 1853 };
duke@435 1854
duke@435 1855 // A basic heap walk context for the deprecated heap walking functions.
duke@435 1856 // The context for a basic heap walk consists of the callbacks and the
duke@435 1857 // fields used by the referrer caching scheme.
duke@435 1858 class BasicHeapWalkContext: public HeapWalkContext {
duke@435 1859 private:
duke@435 1860 jvmtiHeapRootCallback _heap_root_callback;
duke@435 1861 jvmtiStackReferenceCallback _stack_ref_callback;
duke@435 1862 jvmtiObjectReferenceCallback _object_ref_callback;
duke@435 1863
duke@435 1864 // used for caching
duke@435 1865 oop _last_referrer;
duke@435 1866 jlong _last_referrer_tag;
duke@435 1867
duke@435 1868 public:
duke@435 1869 BasicHeapWalkContext() : HeapWalkContext(false) { }
duke@435 1870
duke@435 1871 BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
duke@435 1872 jvmtiStackReferenceCallback stack_ref_callback,
duke@435 1873 jvmtiObjectReferenceCallback object_ref_callback) :
duke@435 1874 HeapWalkContext(true),
duke@435 1875 _heap_root_callback(heap_root_callback),
duke@435 1876 _stack_ref_callback(stack_ref_callback),
duke@435 1877 _object_ref_callback(object_ref_callback),
duke@435 1878 _last_referrer(NULL),
duke@435 1879 _last_referrer_tag(0) {
duke@435 1880 }
duke@435 1881
duke@435 1882 // accessors
duke@435 1883 jvmtiHeapRootCallback heap_root_callback() const { return _heap_root_callback; }
duke@435 1884 jvmtiStackReferenceCallback stack_ref_callback() const { return _stack_ref_callback; }
duke@435 1885 jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback; }
duke@435 1886
duke@435 1887 oop last_referrer() const { return _last_referrer; }
duke@435 1888 void set_last_referrer(oop referrer) { _last_referrer = referrer; }
duke@435 1889 jlong last_referrer_tag() const { return _last_referrer_tag; }
duke@435 1890 void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
duke@435 1891 };
duke@435 1892
duke@435 1893 // The advanced heap walk context for the FollowReferences functions.
duke@435 1894 // The context consists of the callbacks and the fields used for filtering.
duke@435 1895 class AdvancedHeapWalkContext: public HeapWalkContext {
duke@435 1896 private:
duke@435 1897 jint _heap_filter;
duke@435 1898 KlassHandle _klass_filter;
duke@435 1899 const jvmtiHeapCallbacks* _heap_callbacks;
duke@435 1900
duke@435 1901 public:
duke@435 1902 AdvancedHeapWalkContext() : HeapWalkContext(false) { }
duke@435 1903
duke@435 1904 AdvancedHeapWalkContext(jint heap_filter,
duke@435 1905 KlassHandle klass_filter,
duke@435 1906 const jvmtiHeapCallbacks* heap_callbacks) :
duke@435 1907 HeapWalkContext(true),
duke@435 1908 _heap_filter(heap_filter),
duke@435 1909 _klass_filter(klass_filter),
duke@435 1910 _heap_callbacks(heap_callbacks) {
duke@435 1911 }
duke@435 1912
duke@435 1913 // accessors
duke@435 1914 jint heap_filter() const { return _heap_filter; }
duke@435 1915 KlassHandle klass_filter() const { return _klass_filter; }
duke@435 1916
duke@435 1917 const jvmtiHeapReferenceCallback heap_reference_callback() const {
duke@435 1918 return _heap_callbacks->heap_reference_callback;
duke@435 1919 };
duke@435 1920 const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
duke@435 1921 return _heap_callbacks->primitive_field_callback;
duke@435 1922 }
duke@435 1923 const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
duke@435 1924 return _heap_callbacks->array_primitive_value_callback;
duke@435 1925 }
duke@435 1926 const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
duke@435 1927 return _heap_callbacks->string_primitive_value_callback;
duke@435 1928 }
duke@435 1929 };
duke@435 1930
duke@435 1931 // The CallbackInvoker is a class with static functions that the heap walk can call
duke@435 1932 // into to invoke callbacks. It works in one of two modes. The "basic" mode is
duke@435 1933 // used for the deprecated IterateOverReachableObjects functions. The "advanced"
duke@435 1934 // mode is for the newer FollowReferences function which supports a lot of
duke@435 1935 // additional callbacks.
duke@435 1936 class CallbackInvoker : AllStatic {
duke@435 1937 private:
duke@435 1938 // heap walk styles
duke@435 1939 enum { basic, advanced };
duke@435 1940 static int _heap_walk_type;
duke@435 1941 static bool is_basic_heap_walk() { return _heap_walk_type == basic; }
duke@435 1942 static bool is_advanced_heap_walk() { return _heap_walk_type == advanced; }
duke@435 1943
duke@435 1944 // context for basic style heap walk
duke@435 1945 static BasicHeapWalkContext _basic_context;
duke@435 1946 static BasicHeapWalkContext* basic_context() {
duke@435 1947 assert(_basic_context.is_valid(), "invalid");
duke@435 1948 return &_basic_context;
duke@435 1949 }
duke@435 1950
duke@435 1951 // context for advanced style heap walk
duke@435 1952 static AdvancedHeapWalkContext _advanced_context;
duke@435 1953 static AdvancedHeapWalkContext* advanced_context() {
duke@435 1954 assert(_advanced_context.is_valid(), "invalid");
duke@435 1955 return &_advanced_context;
duke@435 1956 }
duke@435 1957
duke@435 1958 // context needed for all heap walks
duke@435 1959 static JvmtiTagMap* _tag_map;
duke@435 1960 static const void* _user_data;
duke@435 1961 static GrowableArray<oop>* _visit_stack;
duke@435 1962
duke@435 1963 // accessors
duke@435 1964 static JvmtiTagMap* tag_map() { return _tag_map; }
duke@435 1965 static const void* user_data() { return _user_data; }
duke@435 1966 static GrowableArray<oop>* visit_stack() { return _visit_stack; }
duke@435 1967
duke@435 1968 // if the object hasn't been visited then push it onto the visit stack
duke@435 1969 // so that it will be visited later
duke@435 1970 static inline bool check_for_visit(oop obj) {
duke@435 1971 if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
duke@435 1972 return true;
duke@435 1973 }
duke@435 1974
duke@435 1975 // invoke basic style callbacks
duke@435 1976 static inline bool invoke_basic_heap_root_callback
duke@435 1977 (jvmtiHeapRootKind root_kind, oop obj);
duke@435 1978 static inline bool invoke_basic_stack_ref_callback
duke@435 1979 (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
duke@435 1980 int slot, oop obj);
duke@435 1981 static inline bool invoke_basic_object_reference_callback
duke@435 1982 (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);
duke@435 1983
duke@435 1984 // invoke advanced style callbacks
duke@435 1985 static inline bool invoke_advanced_heap_root_callback
duke@435 1986 (jvmtiHeapReferenceKind ref_kind, oop obj);
duke@435 1987 static inline bool invoke_advanced_stack_ref_callback
duke@435 1988 (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
duke@435 1989 jmethodID method, jlocation bci, jint slot, oop obj);
duke@435 1990 static inline bool invoke_advanced_object_reference_callback
duke@435 1991 (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);
duke@435 1992
duke@435 1993 // used to report the value of primitive fields
duke@435 1994 static inline bool report_primitive_field
duke@435 1995 (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);
duke@435 1996
duke@435 1997 public:
duke@435 1998 // initialize for basic mode
duke@435 1999 static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
duke@435 2000 GrowableArray<oop>* visit_stack,
duke@435 2001 const void* user_data,
duke@435 2002 BasicHeapWalkContext context);
duke@435 2003
duke@435 2004 // initialize for advanced mode
duke@435 2005 static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
duke@435 2006 GrowableArray<oop>* visit_stack,
duke@435 2007 const void* user_data,
duke@435 2008 AdvancedHeapWalkContext context);
duke@435 2009
duke@435 2010 // functions to report roots
duke@435 2011 static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
duke@435 2012 static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
duke@435 2013 jmethodID m, oop o);
duke@435 2014 static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
duke@435 2015 jmethodID method, jlocation bci, jint slot, oop o);
duke@435 2016
duke@435 2017 // functions to report references
duke@435 2018 static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
duke@435 2019 static inline bool report_class_reference(oop referrer, oop referree);
duke@435 2020 static inline bool report_class_loader_reference(oop referrer, oop referree);
duke@435 2021 static inline bool report_signers_reference(oop referrer, oop referree);
duke@435 2022 static inline bool report_protection_domain_reference(oop referrer, oop referree);
duke@435 2023 static inline bool report_superclass_reference(oop referrer, oop referree);
duke@435 2024 static inline bool report_interface_reference(oop referrer, oop referree);
duke@435 2025 static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
duke@435 2026 static inline bool report_field_reference(oop referrer, oop referree, jint slot);
duke@435 2027 static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
duke@435 2028 static inline bool report_primitive_array_values(oop array);
duke@435 2029 static inline bool report_string_value(oop str);
duke@435 2030 static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
duke@435 2031 static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
duke@435 2032 };
duke@435 2033
duke@435 2034 // statics
duke@435 2035 int CallbackInvoker::_heap_walk_type;
duke@435 2036 BasicHeapWalkContext CallbackInvoker::_basic_context;
duke@435 2037 AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
duke@435 2038 JvmtiTagMap* CallbackInvoker::_tag_map;
duke@435 2039 const void* CallbackInvoker::_user_data;
duke@435 2040 GrowableArray<oop>* CallbackInvoker::_visit_stack;
duke@435 2041
duke@435 2042 // initialize for basic heap walk (IterateOverReachableObjects et al)
duke@435 2043 void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
duke@435 2044 GrowableArray<oop>* visit_stack,
duke@435 2045 const void* user_data,
duke@435 2046 BasicHeapWalkContext context) {
duke@435 2047 _tag_map = tag_map;
duke@435 2048 _visit_stack = visit_stack;
duke@435 2049 _user_data = user_data;
duke@435 2050 _basic_context = context;
duke@435 2051 _advanced_context.invalidate(); // will trigger assertion if used
duke@435 2052 _heap_walk_type = basic;
duke@435 2053 }
duke@435 2054
duke@435 2055 // initialize for advanced heap walk (FollowReferences)
duke@435 2056 void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
duke@435 2057 GrowableArray<oop>* visit_stack,
duke@435 2058 const void* user_data,
duke@435 2059 AdvancedHeapWalkContext context) {
duke@435 2060 _tag_map = tag_map;
duke@435 2061 _visit_stack = visit_stack;
duke@435 2062 _user_data = user_data;
duke@435 2063 _advanced_context = context;
duke@435 2064 _basic_context.invalidate(); // will trigger assertion if used
duke@435 2065 _heap_walk_type = advanced;
duke@435 2066 }
duke@435 2067
duke@435 2068
duke@435 2069 // invoke basic style heap root callback
duke@435 2070 inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
duke@435 2071 assert(ServiceUtil::visible_oop(obj), "checking");
duke@435 2072
duke@435 2073 // check if heap roots should be reported
duke@435 2074 jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
duke@435 2075 if (cb == NULL) {
duke@435 2076 return check_for_visit(obj);
duke@435 2077 }
duke@435 2078
duke@435 2079 CallbackWrapper wrapper(tag_map(), obj);
duke@435 2080 jvmtiIterationControl control = (*cb)(root_kind,
duke@435 2081 wrapper.klass_tag(),
duke@435 2082 wrapper.obj_size(),
duke@435 2083 wrapper.obj_tag_p(),
duke@435 2084 (void*)user_data());
duke@435 2085 // push root to visit stack when following references
duke@435 2086 if (control == JVMTI_ITERATION_CONTINUE &&
duke@435 2087 basic_context()->object_ref_callback() != NULL) {
duke@435 2088 visit_stack()->push(obj);
duke@435 2089 }
duke@435 2090 return control != JVMTI_ITERATION_ABORT;
duke@435 2091 }
duke@435 2092
duke@435 2093 // invoke basic style stack ref callback
duke@435 2094 inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
duke@435 2095 jlong thread_tag,
duke@435 2096 jint depth,
duke@435 2097 jmethodID method,
duke@435 2098 jint slot,
duke@435 2099 oop obj) {
duke@435 2100 assert(ServiceUtil::visible_oop(obj), "checking");
duke@435 2101
duke@435 2102 // check if stack refs should be reported
duke@435 2103 jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
duke@435 2104 if (cb == NULL) {
duke@435 2105 return check_for_visit(obj);
duke@435 2106 }
duke@435 2107
duke@435 2108 CallbackWrapper wrapper(tag_map(), obj);
duke@435 2109 jvmtiIterationControl control = (*cb)(root_kind,
duke@435 2110 wrapper.klass_tag(),
duke@435 2111 wrapper.obj_size(),
duke@435 2112 wrapper.obj_tag_p(),
duke@435 2113 thread_tag,
duke@435 2114 depth,
duke@435 2115 method,
duke@435 2116 slot,
duke@435 2117 (void*)user_data());
duke@435 2118 // push root to visit stack when following references
duke@435 2119 if (control == JVMTI_ITERATION_CONTINUE &&
duke@435 2120 basic_context()->object_ref_callback() != NULL) {
duke@435 2121 visit_stack()->push(obj);
duke@435 2122 }
duke@435 2123 return control != JVMTI_ITERATION_ABORT;
duke@435 2124 }
duke@435 2125
duke@435 2126 // invoke basic style object reference callback
duke@435 2127 inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
duke@435 2128 oop referrer,
duke@435 2129 oop referree,
duke@435 2130 jint index) {
duke@435 2131
duke@435 2132 assert(ServiceUtil::visible_oop(referrer), "checking");
duke@435 2133 assert(ServiceUtil::visible_oop(referree), "checking");
duke@435 2134
duke@435 2135 BasicHeapWalkContext* context = basic_context();
duke@435 2136
duke@435 2137 // callback requires the referrer's tag. If it's the same referrer
duke@435 2138 // as the last call then we use the cached value.
duke@435 2139 jlong referrer_tag;
duke@435 2140 if (referrer == context->last_referrer()) {
duke@435 2141 referrer_tag = context->last_referrer_tag();
duke@435 2142 } else {
duke@435 2143 referrer_tag = tag_for(tag_map(), klassOop_if_java_lang_Class(referrer));
duke@435 2144 }
duke@435 2145
duke@435 2146 // do the callback
duke@435 2147 CallbackWrapper wrapper(tag_map(), referree);
duke@435 2148 jvmtiObjectReferenceCallback cb = context->object_ref_callback();
duke@435 2149 jvmtiIterationControl control = (*cb)(ref_kind,
duke@435 2150 wrapper.klass_tag(),
duke@435 2151 wrapper.obj_size(),
duke@435 2152 wrapper.obj_tag_p(),
duke@435 2153 referrer_tag,
duke@435 2154 index,
duke@435 2155 (void*)user_data());
duke@435 2156
duke@435 2157 // record referrer and referrer tag. For self-references record the
duke@435 2158 // tag value from the callback as this might differ from referrer_tag.
duke@435 2159 context->set_last_referrer(referrer);
duke@435 2160 if (referrer == referree) {
duke@435 2161 context->set_last_referrer_tag(*wrapper.obj_tag_p());
duke@435 2162 } else {
duke@435 2163 context->set_last_referrer_tag(referrer_tag);
duke@435 2164 }
duke@435 2165
duke@435 2166 if (control == JVMTI_ITERATION_CONTINUE) {
duke@435 2167 return check_for_visit(referree);
duke@435 2168 } else {
duke@435 2169 return control != JVMTI_ITERATION_ABORT;
duke@435 2170 }
duke@435 2171 }
duke@435 2172
duke@435 2173 // invoke advanced style heap root callback
duke@435 2174 inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
duke@435 2175 oop obj) {
duke@435 2176 assert(ServiceUtil::visible_oop(obj), "checking");
duke@435 2177
duke@435 2178 AdvancedHeapWalkContext* context = advanced_context();
duke@435 2179
duke@435 2180 // check that callback is provided
duke@435 2181 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
duke@435 2182 if (cb == NULL) {
duke@435 2183 return check_for_visit(obj);
duke@435 2184 }
duke@435 2185
duke@435 2186 // apply class filter
duke@435 2187 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
duke@435 2188 return check_for_visit(obj);
duke@435 2189 }
duke@435 2190
duke@435 2191 // setup the callback wrapper
duke@435 2192 CallbackWrapper wrapper(tag_map(), obj);
duke@435 2193
duke@435 2194 // apply tag filter
duke@435 2195 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@435 2196 wrapper.klass_tag(),
duke@435 2197 context->heap_filter())) {
duke@435 2198 return check_for_visit(obj);
duke@435 2199 }
duke@435 2200
duke@435 2201 // for arrays we need the length, otherwise -1
duke@435 2202 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
duke@435 2203
duke@435 2204 // invoke the callback
duke@435 2205 jint res = (*cb)(ref_kind,
duke@435 2206 NULL, // referrer info
duke@435 2207 wrapper.klass_tag(),
duke@435 2208 0, // referrer_class_tag is 0 for heap root
duke@435 2209 wrapper.obj_size(),
duke@435 2210 wrapper.obj_tag_p(),
duke@435 2211 NULL, // referrer_tag_p
duke@435 2212 len,
duke@435 2213 (void*)user_data());
duke@435 2214 if (res & JVMTI_VISIT_ABORT) {
duke@435 2215 return false;
duke@435 2216 }
duke@435 2217 if (res & JVMTI_VISIT_OBJECTS) {
duke@435 2218 check_for_visit(obj);
duke@435 2219 }
duke@435 2220 return true;
duke@435 2221 }
duke@435 2222
duke@435 2223 // report a reference from a thread stack to an object
duke@435 2224 inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
duke@435 2225 jlong thread_tag,
duke@435 2226 jlong tid,
duke@435 2227 int depth,
duke@435 2228 jmethodID method,
duke@435 2229 jlocation bci,
duke@435 2230 jint slot,
duke@435 2231 oop obj) {
duke@435 2232 assert(ServiceUtil::visible_oop(obj), "checking");
duke@435 2233
duke@435 2234 AdvancedHeapWalkContext* context = advanced_context();
duke@435 2235
duke@435 2236 // check that a callback is provided
duke@435 2237 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
duke@435 2238 if (cb == NULL) {
duke@435 2239 return check_for_visit(obj);
duke@435 2240 }
duke@435 2241
duke@435 2242 // apply class filter
duke@435 2243 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
duke@435 2244 return check_for_visit(obj);
duke@435 2245 }
duke@435 2246
duke@435 2247 // setup the callback wrapper
duke@435 2248 CallbackWrapper wrapper(tag_map(), obj);
duke@435 2249
duke@435 2250 // apply tag filter
duke@435 2251 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@435 2252 wrapper.klass_tag(),
duke@435 2253 context->heap_filter())) {
duke@435 2254 return check_for_visit(obj);
duke@435 2255 }
duke@435 2256
duke@435 2257 // setup the referrer info
duke@435 2258 jvmtiHeapReferenceInfo reference_info;
duke@435 2259 reference_info.stack_local.thread_tag = thread_tag;
duke@435 2260 reference_info.stack_local.thread_id = tid;
duke@435 2261 reference_info.stack_local.depth = depth;
duke@435 2262 reference_info.stack_local.method = method;
duke@435 2263 reference_info.stack_local.location = bci;
duke@435 2264 reference_info.stack_local.slot = slot;
duke@435 2265
duke@435 2266 // for arrays we need the length, otherwise -1
duke@435 2267 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
duke@435 2268
duke@435 2269 // call into the agent
duke@435 2270 int res = (*cb)(ref_kind,
duke@435 2271 &reference_info,
duke@435 2272 wrapper.klass_tag(),
duke@435 2273 0, // referrer_class_tag is 0 for heap root (stack)
duke@435 2274 wrapper.obj_size(),
duke@435 2275 wrapper.obj_tag_p(),
duke@435 2276 NULL, // referrer_tag_p is NULL for heap roots
duke@435 2277 len,
duke@435 2278 (void*)user_data());
duke@435 2279
duke@435 2280 if (res & JVMTI_VISIT_ABORT) {
duke@435 2281 return false;
duke@435 2282 }
duke@435 2283 if (res & JVMTI_VISIT_OBJECTS) {
duke@435 2284 check_for_visit(obj);
duke@435 2285 }
duke@435 2286 return true;
duke@435 2287 }
duke@435 2288
duke@435 2289 // This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
duke@435 2290 // only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
duke@435 2291 #define REF_INFO_MASK ((1 << JVMTI_HEAP_REFERENCE_FIELD) \
duke@435 2292 | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD) \
duke@435 2293 | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
duke@435 2294 | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
duke@435 2295 | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL) \
duke@435 2296 | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
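// For example, (REF_INFO_MASK & (1 << JVMTI_HEAP_REFERENCE_FIELD)) is non-zero,
// so field references pass &reference_info to the agent, whereas a
// JVMTI_HEAP_REFERENCE_CLASS reference passes NULL for the info argument.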
duke@435 2297
duke@435 2298 // invoke the object reference callback to report a reference
duke@435 2299 inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
duke@435 2300 oop referrer,
duke@435 2301 oop obj,
duke@435 2302 jint index)
duke@435 2303 {
duke@435 2304 // field index is only valid field in reference_info
duke@435 2305 static jvmtiHeapReferenceInfo reference_info = { 0 };
duke@435 2306
duke@435 2307 assert(ServiceUtil::visible_oop(referrer), "checking");
duke@435 2308 assert(ServiceUtil::visible_oop(obj), "checking");
duke@435 2309
duke@435 2310 AdvancedHeapWalkContext* context = advanced_context();
duke@435 2311
duke@435 2312 // check that a callback is provided
duke@435 2313 jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
duke@435 2314 if (cb == NULL) {
duke@435 2315 return check_for_visit(obj);
duke@435 2316 }
duke@435 2317
duke@435 2318 // apply class filter
duke@435 2319 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
duke@435 2320 return check_for_visit(obj);
duke@435 2321 }
duke@435 2322
duke@435 2323 // setup the callback wrapper
duke@435 2324 TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);
duke@435 2325
duke@435 2326 // apply tag filter
duke@435 2327 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@435 2328 wrapper.klass_tag(),
duke@435 2329 context->heap_filter())) {
duke@435 2330 return check_for_visit(obj);
duke@435 2331 }
duke@435 2332
duke@435 2333 // field index is only valid field in reference_info
duke@435 2334 reference_info.field.index = index;
duke@435 2335
duke@435 2336 // for arrays we need the length, otherwise -1
duke@435 2337 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);
duke@435 2338
duke@435 2339 // invoke the callback
duke@435 2340 int res = (*cb)(ref_kind,
duke@435 2341 (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
duke@435 2342 wrapper.klass_tag(),
duke@435 2343 wrapper.referrer_klass_tag(),
duke@435 2344 wrapper.obj_size(),
duke@435 2345 wrapper.obj_tag_p(),
duke@435 2346 wrapper.referrer_tag_p(),
duke@435 2347 len,
duke@435 2348 (void*)user_data());
duke@435 2349
duke@435 2350 if (res & JVMTI_VISIT_ABORT) {
duke@435 2351 return false;
duke@435 2352 }
duke@435 2353 if (res & JVMTI_VISIT_OBJECTS) {
duke@435 2354 check_for_visit(obj);
duke@435 2355 }
duke@435 2356 return true;
duke@435 2357 }
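// Illustrative sketch (not part of the VM): the agent-side
// jvmtiHeapReferenceCallback that the advanced invokers above call into, and
// the FollowReferences call that starts the walk from the heap roots. The
// function name and tag value are made up; the signature, visit flags and
// FollowReferences arguments follow the JVM TI specification.
//
//   static jint JNICALL follow_cb(jvmtiHeapReferenceKind kind,
//                                 const jvmtiHeapReferenceInfo* info,
//                                 jlong class_tag, jlong referrer_class_tag,
//                                 jlong size, jlong* tag_ptr,
//                                 jlong* referrer_tag_ptr, jint length,
//                                 void* user_data) {
//     if (*tag_ptr == 0) {
//       *tag_ptr = 7;             // tag each newly reached object
//     }
//     return JVMTI_VISIT_OBJECTS; // keep following this object's references
//   }
//
//   jvmtiHeapCallbacks callbacks;
//   memset(&callbacks, 0, sizeof(callbacks));
//   callbacks.heap_reference_callback = &follow_cb;
//   jvmti->FollowReferences(0 /* no filter */, NULL /* all classes */,
//                           NULL /* start at heap roots */, &callbacks, NULL);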
duke@435 2358
duke@435 2359 // report a "simple root"
duke@435 2360 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
duke@435 2361 assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
duke@435 2362 kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
duke@435 2363 assert(ServiceUtil::visible_oop(obj), "checking");
duke@435 2364
duke@435 2365 if (is_basic_heap_walk()) {
duke@435 2366 // map to old style root kind
duke@435 2367 jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
duke@435 2368 return invoke_basic_heap_root_callback(root_kind, obj);
duke@435 2369 } else {
duke@435 2370 assert(is_advanced_heap_walk(), "wrong heap walk type");
duke@435 2371 return invoke_advanced_heap_root_callback(kind, obj);
duke@435 2372 }
duke@435 2373 }
duke@435 2374
duke@435 2375
duke@435 2376 // report the values of a primitive array
duke@435 2377 inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
duke@435 2378 assert(obj->is_typeArray(), "not a primitive array");
duke@435 2379
duke@435 2380 AdvancedHeapWalkContext* context = advanced_context();
duke@435 2381 assert(context->array_primitive_value_callback() != NULL, "no callback");
duke@435 2382
duke@435 2383 // apply class filter
duke@435 2384 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
duke@435 2385 return true;
duke@435 2386 }
duke@435 2387
duke@435 2388 CallbackWrapper wrapper(tag_map(), obj);
duke@435 2389
duke@435 2390 // apply tag filter
duke@435 2391 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@435 2392 wrapper.klass_tag(),
duke@435 2393 context->heap_filter())) {
duke@435 2394 return true;
duke@435 2395 }
duke@435 2396
duke@435 2397 // invoke the callback
duke@435 2398 int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
duke@435 2399 &wrapper,
duke@435 2400 obj,
duke@435 2401 (void*)user_data());
duke@435 2402 return (!(res & JVMTI_VISIT_ABORT));
duke@435 2403 }
duke@435 2404
duke@435 2405 // invoke the string value callback
duke@435 2406 inline bool CallbackInvoker::report_string_value(oop str) {
never@1577 2407 assert(str->klass() == SystemDictionary::String_klass(), "not a string");
duke@435 2408
duke@435 2409 AdvancedHeapWalkContext* context = advanced_context();
duke@435 2410 assert(context->string_primitive_value_callback() != NULL, "no callback");
duke@435 2411
duke@435 2412 // apply class filter
duke@435 2413 if (is_filtered_by_klass_filter(str, context->klass_filter())) {
duke@435 2414 return true;
duke@435 2415 }
duke@435 2416
duke@435 2417 CallbackWrapper wrapper(tag_map(), str);
duke@435 2418
duke@435 2419 // apply tag filter
duke@435 2420 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@435 2421 wrapper.klass_tag(),
duke@435 2422 context->heap_filter())) {
duke@435 2423 return true;
duke@435 2424 }
duke@435 2425
duke@435 2426 // invoke the callback
duke@435 2427 int res = invoke_string_value_callback(context->string_primitive_value_callback(),
duke@435 2428 &wrapper,
duke@435 2429 str,
duke@435 2430 (void*)user_data());
duke@435 2431 return (!(res & JVMTI_VISIT_ABORT));
duke@435 2432 }
duke@435 2433
duke@435 2434 // invoke the primitive field callback
duke@435 2435 inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
duke@435 2436 oop obj,
duke@435 2437 jint index,
duke@435 2438 address addr,
duke@435 2439 char type)
duke@435 2440 {
duke@435 2441 // for primitive fields only the index will be set
duke@435 2442 static jvmtiHeapReferenceInfo reference_info = { 0 };
duke@435 2443
duke@435 2444 AdvancedHeapWalkContext* context = advanced_context();
duke@435 2445 assert(context->primitive_field_callback() != NULL, "no callback");
duke@435 2446
duke@435 2447 // apply class filter
duke@435 2448 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
duke@435 2449 return true;
duke@435 2450 }
duke@435 2451
duke@435 2452 CallbackWrapper wrapper(tag_map(), obj);
duke@435 2453
duke@435 2454 // apply tag filter
duke@435 2455 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
duke@435 2456 wrapper.klass_tag(),
duke@435 2457 context->heap_filter())) {
duke@435 2458 return true;
duke@435 2459 }
duke@435 2460
duke@435 2461 // the field index in the referrer
duke@435 2462 reference_info.field.index = index;
duke@435 2463
duke@435 2464 // map the type
duke@435 2465 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
duke@435 2466
duke@435 2467 // setup the jvalue
duke@435 2468 jvalue value;
duke@435 2469 copy_to_jvalue(&value, addr, value_type);
duke@435 2470
duke@435 2471 jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
duke@435 2472 int res = (*cb)(ref_kind,
duke@435 2473 &reference_info,
duke@435 2474 wrapper.klass_tag(),
duke@435 2475 wrapper.obj_tag_p(),
duke@435 2476 value,
duke@435 2477 value_type,
duke@435 2478 (void*)user_data());
duke@435 2479 return (!(res & JVMTI_VISIT_ABORT));
duke@435 2480 }
duke@435 2481
duke@435 2482
duke@435 2483 // instance field
duke@435 2484 inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
duke@435 2485 jint index,
duke@435 2486 address value,
duke@435 2487 char type) {
duke@435 2488 return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
duke@435 2489 obj,
duke@435 2490 index,
duke@435 2491 value,
duke@435 2492 type);
duke@435 2493 }
duke@435 2494
duke@435 2495 // static field
duke@435 2496 inline bool CallbackInvoker::report_primitive_static_field(oop obj,
duke@435 2497 jint index,
duke@435 2498 address value,
duke@435 2499 char type) {
duke@435 2500 return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
duke@435 2501 obj,
duke@435 2502 index,
duke@435 2503 value,
duke@435 2504 type);
duke@435 2505 }
duke@435 2506
duke@435 2507 // report a JNI local (root object) to the profiler
duke@435 2508 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
duke@435 2509 if (is_basic_heap_walk()) {
duke@435 2510 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
duke@435 2511 thread_tag,
duke@435 2512 depth,
duke@435 2513 m,
duke@435 2514 -1,
duke@435 2515 obj);
duke@435 2516 } else {
duke@435 2517 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
duke@435 2518 thread_tag, tid,
duke@435 2519 depth,
duke@435 2520 m,
duke@435 2521 (jlocation)-1,
duke@435 2522 -1,
duke@435 2523 obj);
duke@435 2524 }
duke@435 2525 }
duke@435 2526
duke@435 2527
duke@435 2528 // report a local (stack reference, root object)
duke@435 2529 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
duke@435 2530 jlong tid,
duke@435 2531 jint depth,
duke@435 2532 jmethodID method,
duke@435 2533 jlocation bci,
duke@435 2534 jint slot,
duke@435 2535 oop obj) {
duke@435 2536 if (is_basic_heap_walk()) {
duke@435 2537 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
duke@435 2538 thread_tag,
duke@435 2539 depth,
duke@435 2540 method,
duke@435 2541 slot,
duke@435 2542 obj);
duke@435 2543 } else {
duke@435 2544 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
duke@435 2545 thread_tag,
duke@435 2546 tid,
duke@435 2547 depth,
duke@435 2548 method,
duke@435 2549 bci,
duke@435 2550 slot,
duke@435 2551 obj);
duke@435 2552 }
duke@435 2553 }
duke@435 2554
duke@435 2555 // report an object referencing a class.
duke@435 2556 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
duke@435 2557 if (is_basic_heap_walk()) {
duke@435 2558 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
duke@435 2559 } else {
duke@435 2560 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
duke@435 2561 }
duke@435 2562 }
duke@435 2563
duke@435 2564 // report a class referencing its class loader.
duke@435 2565 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
duke@435 2566 if (is_basic_heap_walk()) {
duke@435 2567 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
duke@435 2568 } else {
duke@435 2569 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
duke@435 2570 }
duke@435 2571 }
duke@435 2572
duke@435 2573 // report a class referencing its signers.
duke@435 2574 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
duke@435 2575 if (is_basic_heap_walk()) {
duke@435 2576 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
duke@435 2577 } else {
duke@435 2578 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
duke@435 2579 }
duke@435 2580 }
duke@435 2581
duke@435 2582 // report a class referencing its protection domain.
duke@435 2583 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
duke@435 2584 if (is_basic_heap_walk()) {
duke@435 2585 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
duke@435 2586 } else {
duke@435 2587 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
duke@435 2588 }
duke@435 2589 }
duke@435 2590
duke@435 2591 // report a class referencing its superclass.
duke@435 2592 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
duke@435 2593 if (is_basic_heap_walk()) {
duke@435 2594 // Send this to be consistent with past implementation
duke@435 2595 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
duke@435 2596 } else {
duke@435 2597 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
duke@435 2598 }
duke@435 2599 }
duke@435 2600
duke@435 2601 // report a class referencing one of its interfaces.
duke@435 2602 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
duke@435 2603 if (is_basic_heap_walk()) {
duke@435 2604 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
duke@435 2605 } else {
duke@435 2606 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
duke@435 2607 }
duke@435 2608 }
duke@435 2609
duke@435 2610 // report a class referencing one of its static fields.
duke@435 2611 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
duke@435 2612 if (is_basic_heap_walk()) {
duke@435 2613 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
duke@435 2614 } else {
duke@435 2615 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
duke@435 2616 }
duke@435 2617 }
duke@435 2618
duke@435 2619 // report an array referencing an element object
duke@435 2620 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
duke@435 2621 if (is_basic_heap_walk()) {
duke@435 2622 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
duke@435 2623 } else {
duke@435 2624 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
duke@435 2625 }
duke@435 2626 }
duke@435 2627
duke@435 2628 // report an object referencing an instance field object
duke@435 2629 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
duke@435 2630 if (is_basic_heap_walk()) {
duke@435 2631 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
duke@435 2632 } else {
duke@435 2633 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
duke@435 2634 }
duke@435 2635 }
duke@435 2636
duke@435 2637 // report a class referencing one of its constant pool entries
duke@435 2638 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
duke@435 2639 if (is_basic_heap_walk()) {
duke@435 2640 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
duke@435 2641 } else {
duke@435 2642 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
duke@435 2643 }
duke@435 2644 }
duke@435 2645
duke@435 2646 // A supporting closure used to process simple roots
duke@435 2647 class SimpleRootsClosure : public OopClosure {
duke@435 2648 private:
duke@435 2649 jvmtiHeapReferenceKind _kind;
duke@435 2650 bool _continue;
duke@435 2651
duke@435 2652 jvmtiHeapReferenceKind root_kind() { return _kind; }
duke@435 2653
duke@435 2654 public:
duke@435 2655 void set_kind(jvmtiHeapReferenceKind kind) {
duke@435 2656 _kind = kind;
duke@435 2657 _continue = true;
duke@435 2658 }
duke@435 2659
duke@435 2660 inline bool stopped() {
duke@435 2661 return !_continue;
duke@435 2662 }
duke@435 2663
duke@435 2664 void do_oop(oop* obj_p) {
duke@435 2665 // iteration has terminated
duke@435 2666 if (stopped()) {
duke@435 2667 return;
duke@435 2668 }
duke@435 2669
duke@435 2670 // ignore null or deleted handles
duke@435 2671 oop o = *obj_p;
duke@435 2672 if (o == NULL || o == JNIHandles::deleted_handle()) {
duke@435 2673 return;
duke@435 2674 }
duke@435 2675
duke@435 2676 jvmtiHeapReferenceKind kind = root_kind();
duke@435 2677
duke@435 2678 // many roots are Klasses so we use the java mirror
duke@435 2679 if (o->is_klass()) {
duke@435 2680 klassOop k = (klassOop)o;
duke@435 2681 o = Klass::cast(k)->java_mirror();
duke@435 2682 } else {
duke@435 2683
duke@435 2684 // SystemDictionary::always_strong_oops_do reports the application
duke@435 2685 // class loader as a root. We want this root to be reported as
duke@435 2686 // a root kind of "OTHER" rather than "SYSTEM_CLASS".
duke@435 2687 if (o->is_instance() && root_kind() == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
duke@435 2688 kind = JVMTI_HEAP_REFERENCE_OTHER;
duke@435 2689 }
duke@435 2690 }
duke@435 2691
duke@435 2692 // some objects are ignored - in the case of simple
duke@435 2693 // roots it's mostly symbolOops that we are skipping
duke@435 2694 // here.
duke@435 2695 if (!ServiceUtil::visible_oop(o)) {
duke@435 2696 return;
duke@435 2697 }
duke@435 2698
duke@435 2699 // invoke the callback
duke@435 2700 _continue = CallbackInvoker::report_simple_root(kind, o);
duke@435 2701
duke@435 2702 }
coleenp@548 2703 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
duke@435 2704 };
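
// From a JVMTI agent's point of view the simple roots collected by the
// closure above surface as the reference_kind argument of the heap
// callbacks. A minimal sketch of an agent-side callback distinguishing a
// few of those kinds follows; the callback name and tagging scheme are
// hypothetical agent code, not part of this file, and the block is guarded
// by #if 0 so it is never compiled into the VM.
#if 0
#include <jvmti.h>

static jint JNICALL example_heap_reference_cb(jvmtiHeapReferenceKind reference_kind,
                                              const jvmtiHeapReferenceInfo* reference_info,
                                              jlong class_tag, jlong referrer_class_tag,
                                              jlong size, jlong* tag_ptr,
                                              jlong* referrer_tag_ptr, jint length,
                                              void* user_data) {
  switch (reference_kind) {
    case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:    // JNI global references
    case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS:  // classes loaded by the boot class loader
    case JVMTI_HEAP_REFERENCE_MONITOR:       // inflated monitors
    case JVMTI_HEAP_REFERENCE_THREAD:        // java.lang.Thread objects
    case JVMTI_HEAP_REFERENCE_OTHER:         // e.g. the application class loader
      *tag_ptr = 1;                          // tag roots so the agent can query them later
      break;
    default:
      break;
  }
  return JVMTI_VISIT_OBJECTS;                // continue following references
}
#endif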
duke@435 2705
duke@435 2706 // A supporting closure used to process JNI locals
duke@435 2707 class JNILocalRootsClosure : public OopClosure {
duke@435 2708 private:
duke@435 2709 jlong _thread_tag;
duke@435 2710 jlong _tid;
duke@435 2711 jint _depth;
duke@435 2712 jmethodID _method;
duke@435 2713 bool _continue;
duke@435 2714 public:
duke@435 2715 void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
duke@435 2716 _thread_tag = thread_tag;
duke@435 2717 _tid = tid;
duke@435 2718 _depth = depth;
duke@435 2719 _method = method;
duke@435 2720 _continue = true;
duke@435 2721 }
duke@435 2722
duke@435 2723 inline bool stopped() {
duke@435 2724 return !_continue;
duke@435 2725 }
duke@435 2726
duke@435 2727 void do_oop(oop* obj_p) {
duke@435 2728 // iteration has terminated
duke@435 2729 if (stopped()) {
duke@435 2730 return;
duke@435 2731 }
duke@435 2732
duke@435 2733 // ignore null or deleted handles
duke@435 2734 oop o = *obj_p;
duke@435 2735 if (o == NULL || o == JNIHandles::deleted_handle()) {
duke@435 2736 return;
duke@435 2737 }
duke@435 2738
duke@435 2739 if (!ServiceUtil::visible_oop(o)) {
duke@435 2740 return;
duke@435 2741 }
duke@435 2742
duke@435 2743 // invoke the callback
duke@435 2744 _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
duke@435 2745 }
coleenp@548 2746 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
duke@435 2747 };
duke@435 2748
duke@435 2749
duke@435 2750 // A VM operation to iterate over objects that are reachable from
duke@435 2751 // a set of roots or an initial object.
duke@435 2752 //
duke@435 2753 // For VM_HeapWalkOperation the set of roots used is :-
duke@435 2754 //
duke@435 2755 // - All JNI global references
duke@435 2756 // - All inflated monitors
duke@435 2757 // - All classes loaded by the boot class loader (or all classes
duke@435 2758 // in the event that class unloading is disabled)
duke@435 2759 // - All java threads
duke@435 2760 // - For each java thread then all locals and JNI local references
duke@435 2761 // on the thread's execution stack
duke@435 2762 // - All visible/explainable objects from Universe::oops_do
duke@435 2763 //
duke@435 2764 class VM_HeapWalkOperation: public VM_Operation {
duke@435 2765 private:
duke@435 2766 enum {
duke@435 2767 initial_visit_stack_size = 4000
duke@435 2768 };
duke@435 2769
duke@435 2770 bool _is_advanced_heap_walk; // indicates FollowReferences
duke@435 2771 JvmtiTagMap* _tag_map;
duke@435 2772 Handle _initial_object;
duke@435 2773 GrowableArray<oop>* _visit_stack; // the visit stack
duke@435 2774
duke@435 2775 bool _collecting_heap_roots; // are we collecting roots
duke@435 2776 bool _following_object_refs; // are we following object references
duke@435 2777
duke@435 2778 bool _reporting_primitive_fields; // optional reporting
duke@435 2779 bool _reporting_primitive_array_values;
duke@435 2780 bool _reporting_string_values;
duke@435 2781
duke@435 2782 GrowableArray<oop>* create_visit_stack() {
duke@435 2783 return new (ResourceObj::C_HEAP) GrowableArray<oop>(initial_visit_stack_size, true);
duke@435 2784 }
duke@435 2785
duke@435 2786 // accessors
duke@435 2787 bool is_advanced_heap_walk() const { return _is_advanced_heap_walk; }
duke@435 2788 JvmtiTagMap* tag_map() const { return _tag_map; }
duke@435 2789 Handle initial_object() const { return _initial_object; }
duke@435 2790
duke@435 2791 bool is_following_references() const { return _following_object_refs; }
duke@435 2792
duke@435 2793 bool is_reporting_primitive_fields() const { return _reporting_primitive_fields; }
duke@435 2794 bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
duke@435 2795 bool is_reporting_string_values() const { return _reporting_string_values; }
duke@435 2796
duke@435 2797 GrowableArray<oop>* visit_stack() const { return _visit_stack; }
duke@435 2798
duke@435 2799 // iterate over the various object types
duke@435 2800 inline bool iterate_over_array(oop o);
duke@435 2801 inline bool iterate_over_type_array(oop o);
duke@435 2802 inline bool iterate_over_class(klassOop o);
duke@435 2803 inline bool iterate_over_object(oop o);
duke@435 2804
duke@435 2805 // root collection
duke@435 2806 inline bool collect_simple_roots();
duke@435 2807 inline bool collect_stack_roots();
duke@435 2808 inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);
duke@435 2809
duke@435 2810 // visit an object
duke@435 2811 inline bool visit(oop o);
duke@435 2812
duke@435 2813 public:
duke@435 2814 VM_HeapWalkOperation(JvmtiTagMap* tag_map,
duke@435 2815 Handle initial_object,
duke@435 2816 BasicHeapWalkContext callbacks,
duke@435 2817 const void* user_data);
duke@435 2818
duke@435 2819 VM_HeapWalkOperation(JvmtiTagMap* tag_map,
duke@435 2820 Handle initial_object,
duke@435 2821 AdvancedHeapWalkContext callbacks,
duke@435 2822 const void* user_data);
duke@435 2823
duke@435 2824 ~VM_HeapWalkOperation();
duke@435 2825
duke@435 2826 VMOp_Type type() const { return VMOp_HeapWalkOperation; }
duke@435 2827 void doit();
duke@435 2828 };
duke@435 2829
duke@435 2830
duke@435 2831 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
duke@435 2832 Handle initial_object,
duke@435 2833 BasicHeapWalkContext callbacks,
duke@435 2834 const void* user_data) {
duke@435 2835 _is_advanced_heap_walk = false;
duke@435 2836 _tag_map = tag_map;
duke@435 2837 _initial_object = initial_object;
duke@435 2838 _following_object_refs = (callbacks.object_ref_callback() != NULL);
duke@435 2839 _reporting_primitive_fields = false;
duke@435 2840 _reporting_primitive_array_values = false;
duke@435 2841 _reporting_string_values = false;
duke@435 2842 _visit_stack = create_visit_stack();
duke@435 2843
duke@435 2844
duke@435 2845 CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
duke@435 2846 }
duke@435 2847
duke@435 2848 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
duke@435 2849 Handle initial_object,
duke@435 2850 AdvancedHeapWalkContext callbacks,
duke@435 2851 const void* user_data) {
duke@435 2852 _is_advanced_heap_walk = true;
duke@435 2853 _tag_map = tag_map;
duke@435 2854 _initial_object = initial_object;
duke@435 2855 _following_object_refs = true;
duke@435 2856   _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);
duke@435 2857   _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);
duke@435 2858   _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);
duke@435 2859 _visit_stack = create_visit_stack();
duke@435 2860
duke@435 2861 CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
duke@435 2862 }
duke@435 2863
duke@435 2864 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
duke@435 2865 if (_following_object_refs) {
duke@435 2866 assert(_visit_stack != NULL, "checking");
duke@435 2867 delete _visit_stack;
duke@435 2868 _visit_stack = NULL;
duke@435 2869 }
duke@435 2870 }
duke@435 2871
duke@435 2872 // an array references its class and has a reference to
duke@435 2873 // each element in the array
duke@435 2874 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
duke@435 2875 objArrayOop array = objArrayOop(o);
duke@435 2876 if (array->klass() == Universe::systemObjArrayKlassObj()) {
duke@435 2877 // filtered out
duke@435 2878 return true;
duke@435 2879 }
duke@435 2880
duke@435 2881 // array reference to its class
duke@435 2882 oop mirror = objArrayKlass::cast(array->klass())->java_mirror();
duke@435 2883 if (!CallbackInvoker::report_class_reference(o, mirror)) {
duke@435 2884 return false;
duke@435 2885 }
duke@435 2886
duke@435 2887 // iterate over the array and report each reference to a
duke@435 2888 // non-null element
duke@435 2889 for (int index=0; index<array->length(); index++) {
duke@435 2890 oop elem = array->obj_at(index);
duke@435 2891 if (elem == NULL) {
duke@435 2892 continue;
duke@435 2893 }
duke@435 2894
duke@435 2895 // report the array reference o[index] = elem
duke@435 2896 if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
duke@435 2897 return false;
duke@435 2898 }
duke@435 2899 }
duke@435 2900 return true;
duke@435 2901 }
duke@435 2902
duke@435 2903 // a type array references its class
duke@435 2904 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
duke@435 2905 klassOop k = o->klass();
duke@435 2906 oop mirror = Klass::cast(k)->java_mirror();
duke@435 2907 if (!CallbackInvoker::report_class_reference(o, mirror)) {
duke@435 2908 return false;
duke@435 2909 }
duke@435 2910
duke@435 2911 // report the array contents if required
duke@435 2912 if (is_reporting_primitive_array_values()) {
duke@435 2913 if (!CallbackInvoker::report_primitive_array_values(o)) {
duke@435 2914 return false;
duke@435 2915 }
duke@435 2916 }
duke@435 2917 return true;
duke@435 2918 }
duke@435 2919
duke@435 2920 // verify that a static oop field is in range
coleenp@548 2921 static inline bool verify_static_oop(instanceKlass* ik,
coleenp@548 2922 klassOop k, int offset) {
coleenp@548 2923 address obj_p = (address)k + offset;
coleenp@548 2924 address start = (address)ik->start_of_static_fields();
coleenp@548 2925 address end = start + (ik->static_oop_field_size() * heapOopSize);
duke@435 2926 assert(end >= start, "sanity check");
duke@435 2927
duke@435 2928 if (obj_p >= start && obj_p < end) {
duke@435 2929 return true;
duke@435 2930 } else {
duke@435 2931 return false;
duke@435 2932 }
duke@435 2933 }
duke@435 2934
duke@435 2935 // a class references its super class, interfaces, class loader, ...
duke@435 2936 // and finally its static fields
duke@435 2937 inline bool VM_HeapWalkOperation::iterate_over_class(klassOop k) {
duke@435 2938 int i;
duke@435 2939 Klass* klass = klassOop(k)->klass_part();
duke@435 2940
duke@435 2941 if (klass->oop_is_instance()) {
duke@435 2942 instanceKlass* ik = instanceKlass::cast(k);
duke@435 2943
duke@435 2944     // ignore the class if it hasn't been linked yet
duke@435 2945 if (!ik->is_linked()) {
duke@435 2946 return true;
duke@435 2947 }
duke@435 2948
duke@435 2949 // get the java mirror
duke@435 2950 oop mirror = klass->java_mirror();
duke@435 2951
duke@435 2952 // super (only if something more interesting than java.lang.Object)
duke@435 2953 klassOop java_super = ik->java_super();
never@1577 2954 if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
duke@435 2955 oop super = Klass::cast(java_super)->java_mirror();
duke@435 2956 if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
duke@435 2957 return false;
duke@435 2958 }
duke@435 2959 }
duke@435 2960
duke@435 2961 // class loader
duke@435 2962 oop cl = ik->class_loader();
duke@435 2963 if (cl != NULL) {
duke@435 2964 if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
duke@435 2965 return false;
duke@435 2966 }
duke@435 2967 }
duke@435 2968
duke@435 2969 // protection domain
duke@435 2970 oop pd = ik->protection_domain();
duke@435 2971 if (pd != NULL) {
duke@435 2972 if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
duke@435 2973 return false;
duke@435 2974 }
duke@435 2975 }
duke@435 2976
duke@435 2977 // signers
duke@435 2978 oop signers = ik->signers();
duke@435 2979 if (signers != NULL) {
duke@435 2980 if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
duke@435 2981 return false;
duke@435 2982 }
duke@435 2983 }
duke@435 2984
duke@435 2985 // references from the constant pool
duke@435 2986 {
duke@435 2987 const constantPoolOop pool = ik->constants();
duke@435 2988 for (int i = 1; i < pool->length(); i++) {
duke@435 2989 constantTag tag = pool->tag_at(i).value();
duke@435 2990 if (tag.is_string() || tag.is_klass()) {
duke@435 2991 oop entry;
duke@435 2992 if (tag.is_string()) {
duke@435 2993 entry = pool->resolved_string_at(i);
duke@435 2994 assert(java_lang_String::is_instance(entry), "must be string");
duke@435 2995 } else {
duke@435 2996 entry = Klass::cast(pool->resolved_klass_at(i))->java_mirror();
duke@435 2997 }
duke@435 2998 if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
duke@435 2999 return false;
duke@435 3000 }
duke@435 3001 }
duke@435 3002 }
duke@435 3003 }
duke@435 3004
duke@435 3005 // interfaces
duke@435 3006 // (These will already have been reported as references from the constant pool
duke@435 3007 // but are specified by IterateOverReachableObjects and must be reported).
duke@435 3008 objArrayOop interfaces = ik->local_interfaces();
duke@435 3009 for (i = 0; i < interfaces->length(); i++) {
duke@435 3010 oop interf = Klass::cast((klassOop)interfaces->obj_at(i))->java_mirror();
duke@435 3011 if (interf == NULL) {
duke@435 3012 continue;
duke@435 3013 }
duke@435 3014 if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
duke@435 3015 return false;
duke@435 3016 }
duke@435 3017 }
duke@435 3018
duke@435 3019 // iterate over the static fields
duke@435 3020
duke@435 3021 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(k);
duke@435 3022 for (i=0; i<field_map->field_count(); i++) {
duke@435 3023 ClassFieldDescriptor* field = field_map->field_at(i);
duke@435 3024 char type = field->field_type();
duke@435 3025 if (!is_primitive_field_type(type)) {
coleenp@548 3026 oop fld_o = k->obj_field(field->field_offset());
coleenp@548 3027 assert(verify_static_oop(ik, k, field->field_offset()), "sanity check");
duke@435 3028 if (fld_o != NULL) {
duke@435 3029 int slot = field->field_index();
duke@435 3030 if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
duke@435 3031 delete field_map;
duke@435 3032 return false;
duke@435 3033 }
duke@435 3034 }
duke@435 3035 } else {
duke@435 3036 if (is_reporting_primitive_fields()) {
duke@435 3037 address addr = (address)k + field->field_offset();
duke@435 3038 int slot = field->field_index();
duke@435 3039 if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
duke@435 3040 delete field_map;
duke@435 3041 return false;
duke@435 3042 }
duke@435 3043 }
duke@435 3044 }
duke@435 3045 }
duke@435 3046 delete field_map;
duke@435 3047
duke@435 3048 return true;
duke@435 3049 }
duke@435 3050
duke@435 3051 return true;
duke@435 3052 }
duke@435 3053
duke@435 3054 // an object references a class and its instance fields
duke@435 3055 // (static fields are ignored here as we report these as
duke@435 3056 // references from the class).
duke@435 3057 inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
duke@435 3058 // reference to the class
duke@435 3059 if (!CallbackInvoker::report_class_reference(o, Klass::cast(o->klass())->java_mirror())) {
duke@435 3060 return false;
duke@435 3061 }
duke@435 3062
duke@435 3063 // iterate over instance fields
duke@435 3064 ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
duke@435 3065 for (int i=0; i<field_map->field_count(); i++) {
duke@435 3066 ClassFieldDescriptor* field = field_map->field_at(i);
duke@435 3067 char type = field->field_type();
duke@435 3068 if (!is_primitive_field_type(type)) {
coleenp@548 3069 oop fld_o = o->obj_field(field->field_offset());
duke@435 3070 if (fld_o != NULL) {
duke@435 3071 // reflection code may have a reference to a klassOop.
duke@435 3072 // - see sun.reflect.UnsafeStaticFieldAccessorImpl and sun.misc.Unsafe
duke@435 3073 if (fld_o->is_klass()) {
duke@435 3074 klassOop k = (klassOop)fld_o;
duke@435 3075 fld_o = Klass::cast(k)->java_mirror();
duke@435 3076 }
duke@435 3077 int slot = field->field_index();
duke@435 3078 if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
duke@435 3079 return false;
duke@435 3080 }
duke@435 3081 }
duke@435 3082 } else {
duke@435 3083 if (is_reporting_primitive_fields()) {
duke@435 3084 // primitive instance field
duke@435 3085 address addr = (address)o + field->field_offset();
duke@435 3086 int slot = field->field_index();
duke@435 3087 if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
duke@435 3088 return false;
duke@435 3089 }
duke@435 3090 }
duke@435 3091 }
duke@435 3092 }
duke@435 3093
duke@435 3094 // if the object is a java.lang.String
duke@435 3095 if (is_reporting_string_values() &&
never@1577 3096 o->klass() == SystemDictionary::String_klass()) {
duke@435 3097 if (!CallbackInvoker::report_string_value(o)) {
duke@435 3098 return false;
duke@435 3099 }
duke@435 3100 }
duke@435 3101 return true;
duke@435 3102 }
duke@435 3103
duke@435 3104
duke@435 3105 // collects all simple (non-stack) roots.
duke@435 3106 // if there's a heap root callback provided then the callback is
duke@435 3107 // invoked for each simple root.
duke@435 3108 // if an object reference callback is provided then all simple
duke@435 3109 // roots are pushed onto the marking stack so that they can be
duke@435 3110 // processed later
duke@435 3111 //
duke@435 3112 inline bool VM_HeapWalkOperation::collect_simple_roots() {
duke@435 3113 SimpleRootsClosure blk;
duke@435 3114
duke@435 3115 // JNI globals
duke@435 3116 blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
duke@435 3117 JNIHandles::oops_do(&blk);
duke@435 3118 if (blk.stopped()) {
duke@435 3119 return false;
duke@435 3120 }
duke@435 3121
duke@435 3122 // Preloaded classes and loader from the system dictionary
duke@435 3123 blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
duke@435 3124 SystemDictionary::always_strong_oops_do(&blk);
duke@435 3125 if (blk.stopped()) {
duke@435 3126 return false;
duke@435 3127 }
duke@435 3128
duke@435 3129 // Inflated monitors
duke@435 3130 blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
duke@435 3131 ObjectSynchronizer::oops_do(&blk);
duke@435 3132 if (blk.stopped()) {
duke@435 3133 return false;
duke@435 3134 }
duke@435 3135
duke@435 3136 // Threads
duke@435 3137 for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
duke@435 3138 oop threadObj = thread->threadObj();
duke@435 3139 if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
duke@435 3140 bool cont = CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD, threadObj);
duke@435 3141 if (!cont) {
duke@435 3142 return false;
duke@435 3143 }
duke@435 3144 }
duke@435 3145 }
duke@435 3146
duke@435 3147 // Other kinds of roots maintained by HotSpot
duke@435 3148 // Many of these won't be visible but others (such as instances of important
duke@435 3149 // exceptions) will be visible.
duke@435 3150 blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
duke@435 3151 Universe::oops_do(&blk);
jrose@1424 3152
jrose@1424 3153 // If there are any non-perm roots in the code cache, visit them.
jrose@1424 3154 blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
jrose@1424 3155 CodeBlobToOopClosure look_in_blobs(&blk, false);
jrose@1424 3156 CodeCache::scavenge_root_nmethods_do(&look_in_blobs);
jrose@1424 3157
duke@435 3158 return true;
duke@435 3159 }
duke@435 3160
duke@435 3161 // Walk the stack of a given thread and find all references (locals
duke@435 3162 // and JNI calls) and report these as stack references
duke@435 3163 inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
duke@435 3164 JNILocalRootsClosure* blk)
duke@435 3165 {
duke@435 3166 oop threadObj = java_thread->threadObj();
duke@435 3167 assert(threadObj != NULL, "sanity check");
duke@435 3168
duke@435 3169 // only need to get the thread's tag once per thread
duke@435 3170 jlong thread_tag = tag_for(_tag_map, threadObj);
duke@435 3171
duke@435 3172 // also need the thread id
duke@435 3173 jlong tid = java_lang_Thread::thread_id(threadObj);
duke@435 3174
duke@435 3175
duke@435 3176 if (java_thread->has_last_Java_frame()) {
duke@435 3177
duke@435 3178 // vframes are resource allocated
duke@435 3179 Thread* current_thread = Thread::current();
duke@435 3180 ResourceMark rm(current_thread);
duke@435 3181 HandleMark hm(current_thread);
duke@435 3182
duke@435 3183 RegisterMap reg_map(java_thread);
duke@435 3184 frame f = java_thread->last_frame();
duke@435 3185 vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);
duke@435 3186
duke@435 3187 bool is_top_frame = true;
duke@435 3188 int depth = 0;
duke@435 3189 frame* last_entry_frame = NULL;
duke@435 3190
duke@435 3191 while (vf != NULL) {
duke@435 3192 if (vf->is_java_frame()) {
duke@435 3193
duke@435 3194 // java frame (interpreted, compiled, ...)
duke@435 3195 javaVFrame *jvf = javaVFrame::cast(vf);
duke@435 3196
duke@435 3197 // the jmethodID
duke@435 3198 jmethodID method = jvf->method()->jmethod_id();
duke@435 3199
duke@435 3200 if (!(jvf->method()->is_native())) {
duke@435 3201 jlocation bci = (jlocation)jvf->bci();
duke@435 3202 StackValueCollection* locals = jvf->locals();
duke@435 3203 for (int slot=0; slot<locals->size(); slot++) {
duke@435 3204 if (locals->at(slot)->type() == T_OBJECT) {
duke@435 3205 oop o = locals->obj_at(slot)();
duke@435 3206 if (o == NULL) {
duke@435 3207 continue;
duke@435 3208 }
duke@435 3209
duke@435 3210 // stack reference
duke@435 3211 if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
duke@435 3212 bci, slot, o)) {
duke@435 3213 return false;
duke@435 3214 }
duke@435 3215 }
duke@435 3216 }
duke@435 3217 } else {
duke@435 3218 blk->set_context(thread_tag, tid, depth, method);
duke@435 3219 if (is_top_frame) {
duke@435 3220 // JNI locals for the top frame.
duke@435 3221 java_thread->active_handles()->oops_do(blk);
duke@435 3222 } else {
duke@435 3223 if (last_entry_frame != NULL) {
duke@435 3224 // JNI locals for the entry frame
duke@435 3225 assert(last_entry_frame->is_entry_frame(), "checking");
duke@435 3226 last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
duke@435 3227 }
duke@435 3228 }
duke@435 3229 }
duke@435 3230 last_entry_frame = NULL;
duke@435 3231 depth++;
duke@435 3232 } else {
duke@435 3233         // externalVFrame - if this is an entry frame then we report its JNI locals
duke@435 3234         // when we find the corresponding javaVFrame
duke@435 3235 frame* fr = vf->frame_pointer();
duke@435 3236 assert(fr != NULL, "sanity check");
duke@435 3237 if (fr->is_entry_frame()) {
duke@435 3238 last_entry_frame = fr;
duke@435 3239 }
duke@435 3240 }
duke@435 3241
duke@435 3242 vf = vf->sender();
duke@435 3243 is_top_frame = false;
duke@435 3244 }
duke@435 3245 } else {
duke@435 3246 // no last java frame but there may be JNI locals
duke@435 3247 blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
duke@435 3248 java_thread->active_handles()->oops_do(blk);
duke@435 3249 }
duke@435 3250 return true;
duke@435 3251 }
duke@435 3252
duke@435 3253
duke@435 3254 // collects all stack roots - for each thread it walks the execution
duke@435 3255 // stack to find all references and local JNI refs.
duke@435 3256 inline bool VM_HeapWalkOperation::collect_stack_roots() {
duke@435 3257 JNILocalRootsClosure blk;
duke@435 3258 for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
duke@435 3259 oop threadObj = thread->threadObj();
duke@435 3260 if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
duke@435 3261 if (!collect_stack_roots(thread, &blk)) {
duke@435 3262 return false;
duke@435 3263 }
duke@435 3264 }
duke@435 3265 }
duke@435 3266 return true;
duke@435 3267 }
duke@435 3268
duke@435 3269 // visit an object
duke@435 3270 // first mark the object as visited
duke@435 3271 // second get all the outbound references from this object (in other words, all
duke@435 3272 // the objects referenced by this object).
duke@435 3273 //
duke@435 3274 bool VM_HeapWalkOperation::visit(oop o) {
duke@435 3275 // mark object as visited
duke@435 3276 assert(!ObjectMarker::visited(o), "can't visit same object more than once");
duke@435 3277 ObjectMarker::mark(o);
duke@435 3278
duke@435 3279 // instance
duke@435 3280 if (o->is_instance()) {
never@1577 3281 if (o->klass() == SystemDictionary::Class_klass()) {
duke@435 3282 o = klassOop_if_java_lang_Class(o);
duke@435 3283 if (o->is_klass()) {
duke@435 3284 // a java.lang.Class
duke@435 3285 return iterate_over_class(klassOop(o));
duke@435 3286 }
duke@435 3287 } else {
duke@435 3288 return iterate_over_object(o);
duke@435 3289 }
duke@435 3290 }
duke@435 3291
duke@435 3292 // object array
duke@435 3293 if (o->is_objArray()) {
duke@435 3294 return iterate_over_array(o);
duke@435 3295 }
duke@435 3296
duke@435 3297 // type array
duke@435 3298 if (o->is_typeArray()) {
duke@435 3299 return iterate_over_type_array(o);
duke@435 3300 }
duke@435 3301
duke@435 3302 return true;
duke@435 3303 }
duke@435 3304
duke@435 3305 void VM_HeapWalkOperation::doit() {
duke@435 3306 ResourceMark rm;
duke@435 3307 ObjectMarkerController marker;
duke@435 3308 ClassFieldMapCacheMark cm;
duke@435 3309
duke@435 3310 assert(visit_stack()->is_empty(), "visit stack must be empty");
duke@435 3311
duke@435 3312 // the heap walk starts with an initial object or the heap roots
duke@435 3313 if (initial_object().is_null()) {
duke@435 3314 if (!collect_simple_roots()) return;
duke@435 3315 if (!collect_stack_roots()) return;
duke@435 3316 } else {
duke@435 3317 visit_stack()->push(initial_object()());
duke@435 3318 }
duke@435 3319
duke@435 3320 // object references required
duke@435 3321 if (is_following_references()) {
duke@435 3322
duke@435 3323 // visit each object until all reachable objects have been
duke@435 3324 // visited or the callback asked to terminate the iteration.
duke@435 3325 while (!visit_stack()->is_empty()) {
duke@435 3326 oop o = visit_stack()->pop();
duke@435 3327 if (!ObjectMarker::visited(o)) {
duke@435 3328 if (!visit(o)) {
duke@435 3329 break;
duke@435 3330 }
duke@435 3331 }
duke@435 3332 }
duke@435 3333 }
duke@435 3334 }
duke@435 3335
duke@435 3336 // iterate over all objects that are reachable from a set of roots
duke@435 3337 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
duke@435 3338 jvmtiStackReferenceCallback stack_ref_callback,
duke@435 3339 jvmtiObjectReferenceCallback object_ref_callback,
duke@435 3340 const void* user_data) {
duke@435 3341 MutexLocker ml(Heap_lock);
duke@435 3342 BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
duke@435 3343 VM_HeapWalkOperation op(this, Handle(), context, user_data);
duke@435 3344 VMThread::execute(&op);
duke@435 3345 }
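
// A minimal agent-side sketch of the basic walk that lands here; the
// counting callback and helper are hypothetical agent code, not part of
// this file, and the block is guarded by #if 0 so it is never compiled
// into the VM.
#if 0
#include <jvmti.h>

static jvmtiIterationControl JNICALL example_object_ref_cb(jvmtiObjectReferenceKind reference_kind,
                                                           jlong class_tag, jlong size,
                                                           jlong* tag_ptr, jlong referrer_tag,
                                                           jint referrer_index, void* user_data) {
  jlong* count = (jlong*)user_data;
  (*count)++;                              // count every reported reference
  return JVMTI_ITERATION_CONTINUE;         // keep walking
}

static void example_basic_walk(jvmtiEnv* jvmti) {
  jlong count = 0;
  // NULL heap root and stack reference callbacks: only object references are reported
  jvmti->IterateOverReachableObjects(NULL, NULL, &example_object_ref_cb, &count);
}
#endif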
duke@435 3346
duke@435 3347 // iterate over all objects that are reachable from a given object
duke@435 3348 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
duke@435 3349 jvmtiObjectReferenceCallback object_ref_callback,
duke@435 3350 const void* user_data) {
duke@435 3351 oop obj = JNIHandles::resolve(object);
duke@435 3352 Handle initial_object(Thread::current(), obj);
duke@435 3353
duke@435 3354 MutexLocker ml(Heap_lock);
duke@435 3355 BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
duke@435 3356 VM_HeapWalkOperation op(this, initial_object, context, user_data);
duke@435 3357 VMThread::execute(&op);
duke@435 3358 }
duke@435 3359
duke@435 3360 // follow references from an initial object or the GC roots
duke@435 3361 void JvmtiTagMap::follow_references(jint heap_filter,
duke@435 3362 KlassHandle klass,
duke@435 3363 jobject object,
duke@435 3364 const jvmtiHeapCallbacks* callbacks,
duke@435 3365 const void* user_data)
duke@435 3366 {
duke@435 3367 oop obj = JNIHandles::resolve(object);
duke@435 3368 Handle initial_object(Thread::current(), obj);
duke@435 3369
duke@435 3370 MutexLocker ml(Heap_lock);
duke@435 3371 AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
duke@435 3372 VM_HeapWalkOperation op(this, initial_object, context, user_data);
duke@435 3373 VMThread::execute(&op);
duke@435 3374 }
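
// The corresponding agent-side call is FollowReferences with a
// jvmtiHeapCallbacks structure. The sketch below is hypothetical agent
// code, not part of this file, and is guarded by #if 0 so it is never
// compiled into the VM; it requests the default case handled above: no
// heap filter, no class filter and no initial object, i.e. a walk that
// starts from the GC roots.
#if 0
#include <string.h>
#include <jvmti.h>

static jint JNICALL example_follow_cb(jvmtiHeapReferenceKind reference_kind,
                                      const jvmtiHeapReferenceInfo* reference_info,
                                      jlong class_tag, jlong referrer_class_tag,
                                      jlong size, jlong* tag_ptr,
                                      jlong* referrer_tag_ptr, jint length,
                                      void* user_data) {
  return JVMTI_VISIT_OBJECTS;              // also visit the references of the referee
}

static void example_follow_references(jvmtiEnv* jvmti) {
  jvmtiHeapCallbacks callbacks;
  memset(&callbacks, 0, sizeof(callbacks));
  callbacks.heap_reference_callback = &example_follow_cb;
  jvmti->FollowReferences(0 /* heap_filter */, NULL /* klass */,
                          NULL /* initial_object */, &callbacks, NULL /* user_data */);
}
#endif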
duke@435 3375
duke@435 3376
duke@435 3377 // called post-GC
duke@435 3378 // - for each JVMTI environment with an object tag map, call its rehash
duke@435 3379 // function to re-sync with the new object locations.
duke@435 3380 void JvmtiTagMap::gc_epilogue(bool full) {
duke@435 3381 assert(SafepointSynchronize::is_at_safepoint(), "must be executed at a safepoint");
duke@435 3382 if (JvmtiEnv::environments_might_exist()) {
duke@435 3383     // re-obtain the memory region for the young generation (it might
duke@435 3384     // have changed due to the adaptive resizing policy)
duke@435 3385 get_young_generation();
duke@435 3386
duke@435 3387 JvmtiEnvIterator it;
duke@435 3388 for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
duke@435 3389 JvmtiTagMap* tag_map = env->tag_map();
duke@435 3390 if (tag_map != NULL && !tag_map->is_empty()) {
duke@435 3391 TraceTime t(full ? "JVMTI Full Rehash " : "JVMTI Rehash ", TraceJVMTIObjectTagging);
duke@435 3392 if (full) {
duke@435 3393 tag_map->rehash(0, n_hashmaps);
duke@435 3394 } else {
duke@435 3395 tag_map->rehash(0, 0); // tag map for young gen only
duke@435 3396 }
duke@435 3397 }
duke@435 3398 }
duke@435 3399 }
duke@435 3400 }
duke@435 3401
duke@435 3402 // CMS has completed reference processing so we may have JNI weak refs
duke@435 3403 // to objects in the CMS generation that have been GC'ed.
duke@435 3404 void JvmtiTagMap::cms_ref_processing_epilogue() {
duke@435 3405 assert(SafepointSynchronize::is_at_safepoint(), "must be executed at a safepoint");
duke@435 3406 assert(UseConcMarkSweepGC, "should only be used with CMS");
duke@435 3407 if (JvmtiEnv::environments_might_exist()) {
duke@435 3408 JvmtiEnvIterator it;
duke@435 3409 for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
duke@435 3410 JvmtiTagMap* tag_map = ((JvmtiEnvBase *)env)->tag_map();
duke@435 3411 if (tag_map != NULL && !tag_map->is_empty()) {
duke@435 3412 TraceTime t("JVMTI Rehash (CMS) ", TraceJVMTIObjectTagging);
duke@435 3413 tag_map->rehash(1, n_hashmaps); // assume CMS not used in young gen
duke@435 3414 }
duke@435 3415 }
duke@435 3416 }
duke@435 3417 }
duke@435 3418
duke@435 3419
duke@435 3420 // For each entry in the hashmaps 'start' to 'end' :
duke@435 3421 //
duke@435 3422 // 1. resolve the JNI weak reference
duke@435 3423 //
duke@435 3424 // 2. If it resolves to NULL it means the object has been freed so the entry
duke@435 3425 // is removed, the weak reference destroyed, and the object free event is
duke@435 3426 // posted (if enabled).
duke@435 3427 //
duke@435 3428 // 3. If the weak reference resolves to an object then we re-hash the object
duke@435 3429 // to see if it has moved or has been promoted (from the young to the old
duke@435 3430 // generation for example).
duke@435 3431 //
duke@435 3432 void JvmtiTagMap::rehash(int start, int end) {
duke@435 3433
duke@435 3434 // does this environment have the OBJECT_FREE event enabled
duke@435 3435 bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);
duke@435 3436
duke@435 3437 // counters used for trace message
duke@435 3438 int freed = 0;
duke@435 3439 int moved = 0;
duke@435 3440 int promoted = 0;
duke@435 3441
duke@435 3442 // we assume there are two hashmaps - one for the young generation
duke@435 3443 // and the other for all other spaces.
duke@435 3444 assert(n_hashmaps == 2, "not implemented");
duke@435 3445 JvmtiTagHashmap* young_hashmap = _hashmap[0];
duke@435 3446 JvmtiTagHashmap* other_hashmap = _hashmap[1];
duke@435 3447
duke@435 3448 // reenable sizing (if disabled)
duke@435 3449 young_hashmap->set_resizing_enabled(true);
duke@435 3450 other_hashmap->set_resizing_enabled(true);
duke@435 3451
duke@435 3452 // when re-hashing the hashmap corresponding to the young generation we
duke@435 3453 // collect the entries corresponding to objects that have been promoted.
duke@435 3454 JvmtiTagHashmapEntry* promoted_entries = NULL;
duke@435 3455
duke@435 3456 if (end >= n_hashmaps) {
duke@435 3457 end = n_hashmaps - 1;
duke@435 3458 }
duke@435 3459
duke@435 3460 for (int i=start; i <= end; i++) {
duke@435 3461 JvmtiTagHashmap* hashmap = _hashmap[i];
duke@435 3462
duke@435 3463 // if the hashmap is empty then we can skip it
duke@435 3464 if (hashmap->_entry_count == 0) {
duke@435 3465 continue;
duke@435 3466 }
duke@435 3467
duke@435 3468 // now iterate through each entry in the table
duke@435 3469
duke@435 3470 JvmtiTagHashmapEntry** table = hashmap->table();
duke@435 3471 int size = hashmap->size();
duke@435 3472
duke@435 3473 for (int pos=0; pos<size; pos++) {
duke@435 3474 JvmtiTagHashmapEntry* entry = table[pos];
duke@435 3475 JvmtiTagHashmapEntry* prev = NULL;
duke@435 3476
duke@435 3477 while (entry != NULL) {
duke@435 3478 JvmtiTagHashmapEntry* next = entry->next();
duke@435 3479
duke@435 3480 jweak ref = entry->object();
duke@435 3481 oop oop = JNIHandles::resolve(ref);
duke@435 3482
duke@435 3483 // has object been GC'ed
duke@435 3484 if (oop == NULL) {
duke@435 3485 // grab the tag
duke@435 3486 jlong tag = entry->tag();
duke@435 3487 guarantee(tag != 0, "checking");
duke@435 3488
duke@435 3489 // remove GC'ed entry from hashmap and return the
duke@435 3490 // entry to the free list
duke@435 3491 hashmap->remove(prev, pos, entry);
duke@435 3492 destroy_entry(entry);
duke@435 3493
duke@435 3494 // destroy the weak ref
duke@435 3495 JNIHandles::destroy_weak_global(ref);
duke@435 3496
duke@435 3497 // post the event to the profiler
duke@435 3498 if (post_object_free) {
duke@435 3499 JvmtiExport::post_object_free(env(), tag);
duke@435 3500 }
duke@435 3501
duke@435 3502 freed++;
duke@435 3503 entry = next;
duke@435 3504 continue;
duke@435 3505 }
duke@435 3506
duke@435 3507 // if this is the young hashmap then the object is either promoted
duke@435 3508 // or moved.
duke@435 3509 // if this is the other hashmap then the object is moved.
duke@435 3510
duke@435 3511 bool same_gen;
duke@435 3512 if (i == 0) {
duke@435 3513 assert(hashmap == young_hashmap, "checking");
duke@435 3514 same_gen = is_in_young(oop);
duke@435 3515 } else {
duke@435 3516 same_gen = true;
duke@435 3517 }
duke@435 3518
duke@435 3519
duke@435 3520 if (same_gen) {
duke@435 3521 // if the object has moved then re-hash it and move its
duke@435 3522 // entry to its new location.
duke@435 3523 unsigned int new_pos = JvmtiTagHashmap::hash(oop, size);
duke@435 3524 if (new_pos != (unsigned int)pos) {
duke@435 3525 if (prev == NULL) {
duke@435 3526 table[pos] = next;
duke@435 3527 } else {
duke@435 3528 prev->set_next(next);
duke@435 3529 }
duke@435 3530 entry->set_next(table[new_pos]);
duke@435 3531 table[new_pos] = entry;
duke@435 3532 moved++;
duke@435 3533 } else {
duke@435 3534 // object didn't move
duke@435 3535 prev = entry;
duke@435 3536 }
duke@435 3537 } else {
duke@435 3538 // object has been promoted so remove the entry from the
duke@435 3539 // young hashmap
duke@435 3540 assert(hashmap == young_hashmap, "checking");
duke@435 3541 hashmap->remove(prev, pos, entry);
duke@435 3542
duke@435 3543 // move the entry to the promoted list
duke@435 3544 entry->set_next(promoted_entries);
duke@435 3545 promoted_entries = entry;
duke@435 3546 }
duke@435 3547
duke@435 3548 entry = next;
duke@435 3549 }
duke@435 3550 }
duke@435 3551 }
duke@435 3552
duke@435 3553
duke@435 3554 // add the entries, corresponding to the promoted objects, to the
duke@435 3555 // other hashmap.
duke@435 3556 JvmtiTagHashmapEntry* entry = promoted_entries;
duke@435 3557 while (entry != NULL) {
duke@435 3558 oop o = JNIHandles::resolve(entry->object());
duke@435 3559 assert(hashmap_for(o) == other_hashmap, "checking");
duke@435 3560 JvmtiTagHashmapEntry* next = entry->next();
duke@435 3561 other_hashmap->add(o, entry);
duke@435 3562 entry = next;
duke@435 3563 promoted++;
duke@435 3564 }
duke@435 3565
duke@435 3566 // stats
duke@435 3567 if (TraceJVMTIObjectTagging) {
duke@435 3568 int total_moves = promoted + moved;
duke@435 3569
duke@435 3570 int post_total = 0;
duke@435 3571 for (int i=0; i<n_hashmaps; i++) {
duke@435 3572 post_total += _hashmap[i]->_entry_count;
duke@435 3573 }
duke@435 3574 int pre_total = post_total + freed;
duke@435 3575
duke@435 3576 tty->print("(%d->%d, %d freed, %d promoted, %d total moves)",
duke@435 3577 pre_total, post_total, freed, promoted, total_moves);
duke@435 3578 }
duke@435 3579 }
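
// The ObjectFree events posted by rehash() above are only delivered to
// agents that tag objects and enable the event. A minimal agent-side
// sketch follows; the function names are hypothetical agent code, not part
// of this file (capabilities would normally be requested in Agent_OnLoad),
// and the block is guarded by #if 0 so it is never compiled into the VM.
#if 0
#include <string.h>
#include <jvmti.h>

static void JNICALL example_object_free_cb(jvmtiEnv* jvmti, jlong tag) {
  // the object carrying this tag has been garbage collected
}

static void example_enable_object_free(jvmtiEnv* jvmti, jobject obj) {
  jvmtiCapabilities caps;
  memset(&caps, 0, sizeof(caps));
  caps.can_tag_objects = 1;                  // required for SetTag
  caps.can_generate_object_free_events = 1;  // required for ObjectFree
  jvmti->AddCapabilities(&caps);

  jvmtiEventCallbacks callbacks;
  memset(&callbacks, 0, sizeof(callbacks));
  callbacks.ObjectFree = &example_object_free_cb;
  jvmti->SetEventCallbacks(&callbacks, sizeof(callbacks));
  jvmti->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_OBJECT_FREE, NULL);

  jvmti->SetTag(obj, 42);                    // a non-zero tag creates a tag map entry
}
#endif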