Tue, 26 Mar 2013 15:00:34 +0100
8009382: Add JVM_Get{Field|Method}TypeAnnotations
Reviewed-by: dcubed, rbackman
Contributed-by: Joel Borggren-Franck <joel.franck@oracle.com>
1 /*
2 * Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #include "precompiled.hpp"
26 #include "classfile/symbolTable.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "jvmtifiles/jvmtiEnv.hpp"
30 #include "oops/instanceMirrorKlass.hpp"
31 #include "oops/objArrayKlass.hpp"
32 #include "oops/oop.inline2.hpp"
33 #include "prims/jvmtiEventController.hpp"
34 #include "prims/jvmtiEventController.inline.hpp"
35 #include "prims/jvmtiExport.hpp"
36 #include "prims/jvmtiImpl.hpp"
37 #include "prims/jvmtiTagMap.hpp"
38 #include "runtime/biasedLocking.hpp"
39 #include "runtime/javaCalls.hpp"
40 #include "runtime/jniHandles.hpp"
41 #include "runtime/mutex.hpp"
42 #include "runtime/mutexLocker.hpp"
43 #include "runtime/reflectionUtils.hpp"
44 #include "runtime/vframe.hpp"
45 #include "runtime/vmThread.hpp"
46 #include "runtime/vm_operations.hpp"
47 #include "services/serviceUtil.hpp"
48 #include "utilities/macros.hpp"
49 #if INCLUDE_ALL_GCS
50 #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp"
51 #endif // INCLUDE_ALL_GCS
// JvmtiTagHashmapEntry
//
// Each entry encapsulates a reference to the tagged object
// and the tag value. In addition an entry includes a next pointer which
// is used to chain entries together.

class JvmtiTagHashmapEntry : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;

  oop _object;                  // tagged object
  jlong _tag;                   // the tag
  JvmtiTagHashmapEntry* _next;  // next on the list

  // (re-)initialize all fields; also used when recycling an entry
  // from the tag map's free list (see JvmtiTagMap::create_entry)
  inline void init(oop object, jlong tag) {
    _object = object;
    _tag = tag;
    _next = NULL;
  }

  // constructor (private - entries are created only by the friend JvmtiTagMap)
  JvmtiTagHashmapEntry(oop object, jlong tag) { init(object, tag); }

 public:

  // accessor methods
  inline oop object() const { return _object; }
  inline oop* object_addr() { return &_object; }
  inline jlong tag() const { return _tag; }

  // update the tag value; a tag of zero means "untagged", which is
  // represented by removing the entry rather than storing 0 - hence the assert
  inline void set_tag(jlong tag) {
    assert(tag != 0, "can't be zero");
    _tag = tag;
  }

  inline JvmtiTagHashmapEntry* next() const { return _next; }
  inline void set_next(JvmtiTagHashmapEntry* next) { _next = next; }
};
// JvmtiTagHashmap
//
// A hashmap is essentially a table of pointers to entries. Entries
// are hashed to a location, or position in the table, and then
// chained from that location. The "key" for hashing is address of
// the object, or oop. The "value" is the tag value.
//
// A hashmap maintains a count of the number entries in the hashmap
// and resizes if the number of entries exceeds a given threshold.
// The threshold is specified as a percentage of the size - for
// example a threshold of 0.75 will trigger the hashmap to resize
// if the number of entries is >75% of table size.
//
// A hashmap provides functions for adding, removing, and finding
// entries. It also provides a function to iterate over all entries
// in the hashmap.

class JvmtiTagHashmap : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;

  // entry-count thresholds at which memory usage is traced when
  // TraceJVMTIObjectTagging is enabled (see add())
  enum {
    small_trace_threshold = 10000,    // threshold for tracing
    medium_trace_threshold = 100000,
    large_trace_threshold = 1000000,
    initial_trace_threshold = small_trace_threshold
  };

  static int _sizes[];          // array of possible hashmap sizes (-1 terminated)
  int _size;                    // actual size of the table
  int _size_index;              // index into size table

  int _entry_count;             // number of entries in the hashmap

  float _load_factor;           // load factor as a % of the size
  int _resize_threshold;        // computed threshold to trigger resizing.
  bool _resizing_enabled;       // indicates if hashmap can resize

  int _trace_threshold;         // threshold for trace messages (-1 = disabled)

  JvmtiTagHashmapEntry** _table;        // the table of entries.

  // private accessors
  int resize_threshold() const { return _resize_threshold; }
  int trace_threshold() const { return _trace_threshold; }

  // initialize the hashmap: pick the table size from _sizes[size_index],
  // compute the resize threshold from the load factor, and allocate the
  // (NULL-filled) bucket array on the C heap
  void init(int size_index=0, float load_factor=4.0f) {
    int initial_size = _sizes[size_index];
    _size_index = size_index;
    _size = initial_size;
    _entry_count = 0;
    if (TraceJVMTIObjectTagging) {
      _trace_threshold = initial_trace_threshold;
    } else {
      // negative threshold disables the tracing check in add()
      _trace_threshold = -1;
    }
    _load_factor = load_factor;
    _resize_threshold = (int)(_load_factor * _size);
    _resizing_enabled = true;
    size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
    _table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (_table == NULL) {
      // allocation failure of the initial table is fatal
      vm_exit_out_of_memory(s, "unable to allocate initial hashtable for jvmti object tags");
    }
    for (int i=0; i<initial_size; i++) {
      _table[i] = NULL;
    }
  }

  // hash a given key (oop) with the specified size
  static unsigned int hash(oop key, int size) {
    // shift right to get better distribution (as these bits will be zero
    // with aligned addresses)
    // note: on LP64 the cast keeps only the low 32 bits of the address
    unsigned int addr = (unsigned int)((intptr_t)key);
#ifdef _LP64
    return (addr >> 3) % size;
#else
    return (addr >> 2) % size;
#endif
  }

  // hash a given key (oop)
  unsigned int hash(oop key) {
    return hash(key, _size);
  }

  // resize the hashmap - allocates a large table and re-hashes
  // all entries into the new table.
  void resize() {
    int new_size_index = _size_index+1;
    int new_size = _sizes[new_size_index];
    if (new_size < 0) {
      // hashmap already at maximum capacity (-1 terminates _sizes)
      return;
    }

    // allocate new table
    size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
    JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (new_table == NULL) {
      // non-fatal: keep the current table and stop trying to grow
      warning("unable to allocate larger hashtable for jvmti object tags");
      set_resizing_enabled(false);
      return;
    }

    // initialize new table
    int i;
    for (i=0; i<new_size; i++) {
      new_table[i] = NULL;
    }

    // rehash all entries into the new table
    for (i=0; i<_size; i++) {
      JvmtiTagHashmapEntry* entry = _table[i];
      while (entry != NULL) {
        JvmtiTagHashmapEntry* next = entry->next();
        oop key = entry->object();
        assert(key != NULL, "jni weak reference cleared!!");
        unsigned int h = hash(key, new_size);
        JvmtiTagHashmapEntry* anchor = new_table[h];
        if (anchor == NULL) {
          new_table[h] = entry;
          entry->set_next(NULL);
        } else {
          // insert at the head of the chain
          entry->set_next(anchor);
          new_table[h] = entry;
        }
        entry = next;
      }
    }

    // free old table and update settings.
    os::free((void*)_table);
    _table = new_table;
    _size_index = new_size_index;
    _size = new_size;

    // compute new resize threshold
    _resize_threshold = (int)(_load_factor * _size);
  }


  // internal remove function - remove an entry at a given position in the
  // table. The caller supplies the predecessor (prev == NULL means the entry
  // is at the head of its chain). Does not free the entry.
  inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
    assert(pos >= 0 && pos < _size, "out of range");
    if (prev == NULL) {
      _table[pos] = entry->next();
    } else {
      prev->set_next(entry->next());
    }
    assert(_entry_count > 0, "checking");
    _entry_count--;
  }

  // resizing switch
  bool is_resizing_enabled() const { return _resizing_enabled; }
  void set_resizing_enabled(bool enable) { _resizing_enabled = enable; }

  // debugging
  void print_memory_usage();
  void compute_next_trace_threshold();

 public:

  // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
  // The preferred size is rounded down to an actual size.
  JvmtiTagHashmap(int size, float load_factor=0.0f) {
    int i=0;
    while (_sizes[i] < size) {
      if (_sizes[i] < 0) {
        // hit the -1 terminator - clamp to the largest available size
        assert(i > 0, "sanity check");
        i--;
        break;
      }
      i++;
    }

    // if a load factor is specified then use it, otherwise use default
    if (load_factor > 0.01f) {
      init(i, load_factor);
    } else {
      init(i);
    }
  }

  // create a JvmtiTagHashmap with default settings
  JvmtiTagHashmap() {
    init();
  }

  // release table when JvmtiTagHashmap destroyed
  // (note: the entries themselves are owned and freed by JvmtiTagMap)
  ~JvmtiTagHashmap() {
    if (_table != NULL) {
      os::free((void*)_table);
      _table = NULL;
    }
  }

  // accessors
  int size() const { return _size; }
  JvmtiTagHashmapEntry** table() const { return _table; }
  int entry_count() const { return _entry_count; }

  // find an entry in the hashmap, returns NULL if not found.
  inline JvmtiTagHashmapEntry* find(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    while (entry != NULL) {
      if (entry->object() == key) {
        return entry;
      }
      entry = entry->next();
    }
    return NULL;
  }


  // add a new entry to hashmap
  inline void add(oop key, JvmtiTagHashmapEntry* entry) {
    assert(key != NULL, "checking");
    assert(find(key) == NULL, "duplicate detected");
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* anchor = _table[h];
    if (anchor == NULL) {
      _table[h] = entry;
      entry->set_next(NULL);
    } else {
      // insert at the head of the chain
      entry->set_next(anchor);
      _table[h] = entry;
    }

    _entry_count++;
    if (trace_threshold() > 0 && entry_count() >= trace_threshold()) {
      assert(TraceJVMTIObjectTagging, "should only get here when tracing");
      print_memory_usage();
      compute_next_trace_threshold();
    }

    // if the number of entries exceed the threshold then resize
    if (entry_count() > resize_threshold() && is_resizing_enabled()) {
      resize();
    }
  }

  // remove an entry with the given key. Returns the unlinked entry
  // (not freed) or NULL if the key was not present.
  inline JvmtiTagHashmapEntry* remove(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    JvmtiTagHashmapEntry* prev = NULL;
    while (entry != NULL) {
      if (key == entry->object()) {
        break;
      }
      prev = entry;
      entry = entry->next();
    }
    if (entry != NULL) {
      remove(prev, h, entry);
    }
    return entry;
  }

  // iterate over all entries in the hashmap
  void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
};
// possible hashmap sizes - odd primes that roughly double in size.
// To avoid excessive resizing the odd primes from 4801-76831 and
// 76831-307261 have been removed. The list must be terminated by -1
// (resize() stops growing when it reads the -1 sentinel).
int JvmtiTagHashmap::_sizes[] =  { 4801, 76831, 307261, 614563, 1228891,
    2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 };
// A supporting class for iterating over all entries in Hashmap
class JvmtiTagHashmapEntryClosure {
 public:
  // invoked once per entry; the implementation may remove the current
  // entry from the map (entry_iterate reads the next pointer before
  // invoking do_entry)
  virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0;
};
375 // iterate over all entries in the hashmap
376 void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
377 for (int i=0; i<_size; i++) {
378 JvmtiTagHashmapEntry* entry = _table[i];
379 JvmtiTagHashmapEntry* prev = NULL;
380 while (entry != NULL) {
381 // obtain the next entry before invoking do_entry - this is
382 // necessary because do_entry may remove the entry from the
383 // hashmap.
384 JvmtiTagHashmapEntry* next = entry->next();
385 closure->do_entry(entry);
386 entry = next;
387 }
388 }
389 }
391 // debugging
392 void JvmtiTagHashmap::print_memory_usage() {
393 intptr_t p = (intptr_t)this;
394 tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p);
396 // table + entries in KB
397 int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) +
398 entry_count()*sizeof(JvmtiTagHashmapEntry))/K;
400 int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K);
401 tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]",
402 entry_count(), hashmap_usage, weak_globals_usage);
403 }
405 // compute threshold for the next trace message
406 void JvmtiTagHashmap::compute_next_trace_threshold() {
407 if (trace_threshold() < medium_trace_threshold) {
408 _trace_threshold += small_trace_threshold;
409 } else {
410 if (trace_threshold() < large_trace_threshold) {
411 _trace_threshold += medium_trace_threshold;
412 } else {
413 _trace_threshold += large_trace_threshold;
414 }
415 }
416 }
// create a JvmtiTagMap for the given environment and register it with
// that environment. Caller must hold JvmtiThreadState_lock (asserted)
// and the environment must not already have a tag map.
JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
  _env(env),
  _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false),
  _free_entries(NULL),
  _free_entries_count(0)
{
  assert(JvmtiThreadState_lock->is_locked(), "sanity check");
  assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");

  _hashmap = new JvmtiTagHashmap();

  // finally add us to the environment
  ((JvmtiEnvBase *)env)->set_tag_map(this);
}
// destroy a JvmtiTagMap: unregister from the environment, then free all
// hashmap entries, the hashmap itself, and the recycled-entry free list.
JvmtiTagMap::~JvmtiTagMap() {

  // no lock acquired as we assume the enclosing environment is
  // also being destroyed.
  ((JvmtiEnvBase *)_env)->set_tag_map(NULL);

  // free all entries still chained in the hashmap buckets
  JvmtiTagHashmapEntry** table = _hashmap->table();
  for (int j = 0; j < _hashmap->size(); j++) {
    JvmtiTagHashmapEntry* entry = table[j];
    while (entry != NULL) {
      JvmtiTagHashmapEntry* next = entry->next();
      delete entry;
      entry = next;
    }
  }

  // finally destroy the hashmap
  delete _hashmap;
  _hashmap = NULL;

  // remove any entries on the free list
  JvmtiTagHashmapEntry* entry = _free_entries;
  while (entry != NULL) {
    JvmtiTagHashmapEntry* next = entry->next();
    delete entry;
    entry = next;
  }
  _free_entries = NULL;
}
466 // create a hashmap entry
467 // - if there's an entry on the (per-environment) free list then this
468 // is returned. Otherwise an new entry is allocated.
469 JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(oop ref, jlong tag) {
470 assert(Thread::current()->is_VM_thread() || is_locked(), "checking");
471 JvmtiTagHashmapEntry* entry;
472 if (_free_entries == NULL) {
473 entry = new JvmtiTagHashmapEntry(ref, tag);
474 } else {
475 assert(_free_entries_count > 0, "mismatched _free_entries_count");
476 _free_entries_count--;
477 entry = _free_entries;
478 _free_entries = entry->next();
479 entry->init(ref, tag);
480 }
481 return entry;
482 }
484 // destroy an entry by returning it to the free list
485 void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
486 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
487 // limit the size of the free list
488 if (_free_entries_count >= max_free_entries) {
489 delete entry;
490 } else {
491 entry->set_next(_free_entries);
492 _free_entries = entry;
493 _free_entries_count++;
494 }
495 }
// returns the tag map for the given environments. If the tag map
// doesn't exist then it is created.
JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
  // NOTE(review): this is an unsynchronized fast-path read followed by a
  // locked re-check (double-checked-locking shape) - presumably safe here
  // because the pointer is only ever set once under JvmtiThreadState_lock;
  // confirm memory-ordering assumptions for the platforms supported.
  JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map();
  if (tag_map == NULL) {
    MutexLocker mu(JvmtiThreadState_lock);
    tag_map = ((JvmtiEnvBase*)env)->tag_map();
    if (tag_map == NULL) {
      // constructor registers the new map with the environment
      tag_map = new JvmtiTagMap(env);
    }
  } else {
    CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
  }
  return tag_map;
}
// iterate over all entries in the tag map, delegating to the
// underlying hashmap's iteration (closure may remove entries).
void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
  hashmap()->entry_iterate(closure);
}
// returns true if the hashmaps are empty; must be called either at a
// safepoint or with the tag map lock held (asserted)
bool JvmtiTagMap::is_empty() {
  assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
  return hashmap()->entry_count() == 0;
}
525 // Return the tag value for an object, or 0 if the object is
526 // not tagged
527 //
528 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
529 JvmtiTagHashmapEntry* entry = tag_map->hashmap()->find(o);
530 if (entry == NULL) {
531 return 0;
532 } else {
533 return entry->tag();
534 }
535 }
// A CallbackWrapper is a support class for querying and tagging an object
// around a callback to a profiler. The constructor does pre-callback
// work to get the tag value, klass tag value, ... and the destructor
// does the post-callback work of tagging or untagging the object.
//
// {
//   CallbackWrapper wrapper(tag_map, o);
//
//   (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
//
// } // wrapper goes out of scope here which results in the destructor
//      checking to see if the object has been tagged, untagged, or the
//      tag value has changed.
//
class CallbackWrapper : public StackObj {
 private:
  JvmtiTagMap* _tag_map;
  JvmtiTagHashmap* _hashmap;
  JvmtiTagHashmapEntry* _entry;   // existing entry for _o, or NULL if untagged
  oop _o;
  jlong _obj_size;
  jlong _obj_tag;                 // the callback mutates this via obj_tag_p()
  jlong _klass_tag;

 protected:
  JvmtiTagMap* tag_map() const { return _tag_map; }

  // invoked post-callback to tag, untag, or update the tag of an object
  void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
                                       JvmtiTagHashmapEntry* entry, jlong obj_tag);
 public:
  CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
    assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
           "MT unsafe or must be VM thread");

    // object to tag
    _o = o;

    // object size (heap words scaled to bytes)
    _obj_size = (jlong)_o->size() * wordSize;

    // record the context
    _tag_map = tag_map;
    _hashmap = tag_map->hashmap();
    _entry = _hashmap->find(_o);

    // get object tag
    _obj_tag = (_entry == NULL) ? 0 : _entry->tag();

    // get the class and the class's tag value
    assert(SystemDictionary::Class_klass()->oop_is_instanceMirror(), "Is not?");

    _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
  }

  // destructor applies whatever tag change the callback made through
  // obj_tag_p() back to the tag map
  ~CallbackWrapper() {
    post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
  }

  inline jlong* obj_tag_p()                     { return &_obj_tag; }
  inline jlong obj_size() const                 { return _obj_size; }
  inline jlong obj_tag() const                  { return _obj_tag; }
  inline jlong klass_tag() const                { return _klass_tag; }
};
// callback post-callback to tag, untag, or update the tag of an object:
// compares the (possibly callback-modified) obj_tag against the state
// captured in 'entry' before the callback, and reconciles the tag map.
void inline CallbackWrapper::post_callback_tag_update(oop o,
                                                      JvmtiTagHashmap* hashmap,
                                                      JvmtiTagHashmapEntry* entry,
                                                      jlong obj_tag) {
  if (entry == NULL) {
    if (obj_tag != 0) {
      // callback has tagged the object
      assert(Thread::current()->is_VM_thread(), "must be VMThread");
      entry = tag_map()->create_entry(o, obj_tag);
      hashmap->add(o, entry);
    }
    // else: object was untagged and remains untagged - nothing to do
  } else {
    // object was previously tagged - the callback may have untagged
    // the object or changed the tag value
    if (obj_tag == 0) {

      JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
      assert(entry_removed == entry, "checking");
      tag_map()->destroy_entry(entry);

    } else {
      if (obj_tag != entry->tag()) {
         entry->set_tag(obj_tag);
      }
    }
  }
}
// An extended CallbackWrapper used when reporting an object reference
// to the agent.
//
// {
//   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
//
//   (*callback)(wrapper.klass_tag(),
//               wrapper.obj_size(),
//               wrapper.obj_tag_p()
//               wrapper.referrer_tag_p(), ...)
//
// } // wrapper goes out of scope here which results in the destructor
//      checking to see if the referrer object has been tagged, untagged,
//      or the tag value has changed.
//
class TwoOopCallbackWrapper : public CallbackWrapper {
 private:
  bool _is_reference_to_self;
  JvmtiTagHashmap* _referrer_hashmap;
  JvmtiTagHashmapEntry* _referrer_entry;
  oop _referrer;
  jlong _referrer_obj_tag;
  jlong _referrer_klass_tag;
  jlong* _referrer_tag_p;

  bool is_reference_to_self() const             { return _is_reference_to_self; }

 public:
  TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
    CallbackWrapper(tag_map, o)
  {
    // self reference needs to be handled in a special way
    _is_reference_to_self = (referrer == o);

    if (_is_reference_to_self) {
      // share the base wrapper's tag state; note that _referrer,
      // _referrer_hashmap and _referrer_entry are deliberately left
      // unset here - the destructor does not touch them in this case
      _referrer_klass_tag = klass_tag();
      _referrer_tag_p = obj_tag_p();
    } else {
      _referrer = referrer;
      // record the context
      _referrer_hashmap = tag_map->hashmap();
      _referrer_entry = _referrer_hashmap->find(_referrer);

      // get object tag
      _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
      _referrer_tag_p = &_referrer_obj_tag;

      // get referrer class tag.
      _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
    }
  }

  // reconcile the referrer's tag (possibly modified by the callback via
  // referrer_tag_p()); for a self reference the base class destructor
  // already handles the single shared tag
  ~TwoOopCallbackWrapper() {
    if (!is_reference_to_self()){
      post_callback_tag_update(_referrer,
                               _referrer_hashmap,
                               _referrer_entry,
                               _referrer_obj_tag);
    }
  }

  // address of referrer tag
  // (for a self reference this will return the same thing as obj_tag_p())
  inline jlong* referrer_tag_p() { return _referrer_tag_p; }

  // referrer's class tag
  inline jlong referrer_klass_tag() { return _referrer_klass_tag; }
};
703 // tag an object
704 //
705 // This function is performance critical. If many threads attempt to tag objects
706 // around the same time then it's possible that the Mutex associated with the
707 // tag map will be a hot lock.
708 void JvmtiTagMap::set_tag(jobject object, jlong tag) {
709 MutexLocker ml(lock());
711 // resolve the object
712 oop o = JNIHandles::resolve_non_null(object);
714 // see if the object is already tagged
715 JvmtiTagHashmap* hashmap = _hashmap;
716 JvmtiTagHashmapEntry* entry = hashmap->find(o);
718 // if the object is not already tagged then we tag it
719 if (entry == NULL) {
720 if (tag != 0) {
721 entry = create_entry(o, tag);
722 hashmap->add(o, entry);
723 } else {
724 // no-op
725 }
726 } else {
727 // if the object is already tagged then we either update
728 // the tag (if a new tag value has been provided)
729 // or remove the object if the new tag value is 0.
730 if (tag == 0) {
731 hashmap->remove(o);
732 destroy_entry(entry);
733 } else {
734 entry->set_tag(tag);
735 }
736 }
737 }
739 // get the tag for an object
740 jlong JvmtiTagMap::get_tag(jobject object) {
741 MutexLocker ml(lock());
743 // resolve the object
744 oop o = JNIHandles::resolve_non_null(object);
746 return tag_for(this, o);
747 }
750 // Helper class used to describe the static or instance fields of a class.
751 // For each field it holds the field index (as defined by the JVMTI specification),
752 // the field type, and the offset.
754 class ClassFieldDescriptor: public CHeapObj<mtInternal> {
755 private:
756 int _field_index;
757 int _field_offset;
758 char _field_type;
759 public:
760 ClassFieldDescriptor(int index, char type, int offset) :
761 _field_index(index), _field_type(type), _field_offset(offset) {
762 }
763 int field_index() const { return _field_index; }
764 char field_type() const { return _field_type; }
765 int field_offset() const { return _field_offset; }
766 };
// A ClassFieldMap is an owning, growable list of ClassFieldDescriptor
// objects. Instances are created only through the two static factory
// functions below (the constructor is private).
class ClassFieldMap: public CHeapObj<mtInternal> {
 private:
  enum {
    initial_field_count = 5   // initial capacity of the descriptor array
  };

  // list of field descriptors
  GrowableArray<ClassFieldDescriptor*>* _fields;

  // constructor
  ClassFieldMap();

  // add a field
  void add(int index, char type, int offset);

  // returns the field count for the given class
  static int compute_field_count(instanceKlassHandle ikh);

 public:
  ~ClassFieldMap();

  // access
  int field_count()                     { return _fields->length(); }
  ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }

  // functions to create maps of static or instance fields
  static ClassFieldMap* create_map_of_static_fields(Klass* k);
  static ClassFieldMap* create_map_of_instance_fields(oop obj);
};
// create an empty field map; the backing array is explicitly allocated
// on the C heap (not the resource area) since the map outlives the
// current resource scope
ClassFieldMap::ClassFieldMap() {
  _fields = new (ResourceObj::C_HEAP, mtInternal)
    GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
}
803 ClassFieldMap::~ClassFieldMap() {
804 for (int i=0; i<_fields->length(); i++) {
805 delete _fields->at(i);
806 }
807 delete _fields;
808 }
810 void ClassFieldMap::add(int index, char type, int offset) {
811 ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
812 _fields->append(field);
813 }
// Returns a heap allocated ClassFieldMap to describe the static fields
// of the given class.
//
ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
  HandleMark hm;
  instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), k);

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  // count of all (filtered) fields - used to convert the increasing
  // stream position into the JVMTI field index, which counts downward
  // NOTE(review): the counting stream and the iterating stream below are
  // constructed with different boolean flags - presumably deliberate;
  // confirm against FilteredFieldStream's parameter semantics.
  FilteredFieldStream f(ikh, false, false);
  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ikh, true, true); !fld.eos(); fld.next(), index++) {
    // ignore instance fields
    if (!fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
  }
  return field_map;
}
// Returns a heap allocated ClassFieldMap to describe the instance fields
// of the given class. All instance fields are included (this means public
// and private fields declared in superclasses and superinterfaces too).
//
ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
  HandleMark hm;
  instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), obj->klass());

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  // count of all (filtered) fields - used to convert the increasing
  // stream position into the JVMTI field index, which counts downward
  FilteredFieldStream f(ikh, false, false);

  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ikh, false, false); !fld.eos(); fld.next(), index++) {
    // ignore static fields
    if (fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
  }

  return field_map;
}
// Helper class used to cache a ClassFieldMap for the instance fields of
// a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
// heap iteration and avoid creating a field map for each object in the heap
// (only need to create the map when the first instance of a class is encountered).
//
class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
 private:
  enum {
    initial_class_count = 200   // initial capacity of the klass list
  };
  ClassFieldMap* _field_map;    // owned; deleted with this object

  ClassFieldMap* field_map() const          { return _field_map; }

  // construction/destruction only via get_map_of_instance_fields/clear_cache
  JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
  ~JvmtiCachedClassFieldMap();

  // global list of klasses currently caching a field map
  static GrowableArray<InstanceKlass*>* _class_list;
  static void add_to_class_list(InstanceKlass* ik);

 public:
  // returns the field map for a given object (returning map cached
  // by InstanceKlass if possible
  static ClassFieldMap* get_map_of_instance_fields(oop obj);

  // removes the field map from all instanceKlasses - should be
  // called before VM operation completes
  static void clear_cache();

  // returns the number of ClassFieldMap cached by instanceKlasses
  static int cached_field_map_count();
};
// global list of klasses caching a field map; zero-initialized (NULL)
// as a static and lazily allocated in add_to_class_list()
GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
901 JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
902 _field_map = field_map;
903 }
905 JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
906 if (_field_map != NULL) {
907 delete _field_map;
908 }
909 }
// Marker class to ensure that the class file map cache is only used in a defined
// scope. Construction enables caching; destruction clears every cached map.
// Not nestable, VM thread only (both asserted).
class ClassFieldMapCacheMark : public StackObj {
 private:
   static bool _is_active;
 public:
   ClassFieldMapCacheMark() {
     assert(Thread::current()->is_VM_thread(), "must be VMThread");
     assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
     assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
     _is_active = true;
   }
   ~ClassFieldMapCacheMark() {
     // drop all maps cached on instanceKlasses during this scope
     JvmtiCachedClassFieldMap::clear_cache();
     _is_active = false;
   }
   static bool is_active() { return _is_active; }
};
// zero-initialized to false; true only while a mark object is in scope
bool ClassFieldMapCacheMark::_is_active;
933 // record that the given InstanceKlass is caching a field map
934 void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
935 if (_class_list == NULL) {
936 _class_list = new (ResourceObj::C_HEAP, mtInternal)
937 GrowableArray<InstanceKlass*>(initial_class_count, true);
938 }
939 _class_list->push(ik);
940 }
942 // returns the instance field map for the given object
943 // (returns field map cached by the InstanceKlass if possible)
944 ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
945 assert(Thread::current()->is_VM_thread(), "must be VMThread");
946 assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");
948 Klass* k = obj->klass();
949 InstanceKlass* ik = InstanceKlass::cast(k);
951 // return cached map if possible
952 JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
953 if (cached_map != NULL) {
954 assert(cached_map->field_map() != NULL, "missing field list");
955 return cached_map->field_map();
956 } else {
957 ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
958 cached_map = new JvmtiCachedClassFieldMap(field_map);
959 ik->set_jvmti_cached_class_field_map(cached_map);
960 add_to_class_list(ik);
961 return field_map;
962 }
963 }
965 // remove the fields maps cached from all instanceKlasses
966 void JvmtiCachedClassFieldMap::clear_cache() {
967 assert(Thread::current()->is_VM_thread(), "must be VMThread");
968 if (_class_list != NULL) {
969 for (int i = 0; i < _class_list->length(); i++) {
970 InstanceKlass* ik = _class_list->at(i);
971 JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
972 assert(cached_map != NULL, "should not be NULL");
973 ik->set_jvmti_cached_class_field_map(NULL);
974 delete cached_map; // deletes the encapsulated field map
975 }
976 delete _class_list;
977 _class_list = NULL;
978 }
979 }
981 // returns the number of ClassFieldMap cached by instanceKlasses
982 int JvmtiCachedClassFieldMap::cached_field_map_count() {
983 return (_class_list == NULL) ? 0 : _class_list->length();
984 }
986 // helper function to indicate if an object is filtered by its tag or class tag
987 static inline bool is_filtered_by_heap_filter(jlong obj_tag,
988 jlong klass_tag,
989 int heap_filter) {
990 // apply the heap filter
991 if (obj_tag != 0) {
992 // filter out tagged objects
993 if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
994 } else {
995 // filter out untagged objects
996 if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
997 }
998 if (klass_tag != 0) {
999 // filter out objects with tagged classes
1000 if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
1001 } else {
1002 // filter out objects with untagged classes.
1003 if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
1004 }
1005 return false;
1006 }
1008 // helper function to indicate if an object is filtered by a klass filter
1009 static inline bool is_filtered_by_klass_filter(oop obj, KlassHandle klass_filter) {
1010 if (!klass_filter.is_null()) {
1011 if (obj->klass() != klass_filter()) {
1012 return true;
1013 }
1014 }
1015 return false;
1016 }
// helper function to tell if a field is a primitive field or not,
// judged by its JVM signature character
static inline bool is_primitive_field_type(char type) {
  switch (type) {
    case 'L':   // object reference
    case '[':   // array reference
      return false;
    default:    // Z, B, C, S, I, J, F, D
      return true;
  }
}
// helper function to copy the value from location addr to jvalue.
// The jvalue union member written is selected by value_type; addr must
// point to a value of the corresponding width. Any value_type outside
// the eight primitive kinds is a programming error.
static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
  switch (value_type) {
    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_BYTE : { v->b = *(jbyte*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_CHAR : { v->c = *(jchar*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_SHORT : { v->s = *(jshort*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_INT : { v->i = *(jint*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_LONG : { v->j = *(jlong*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_FLOAT : { v->f = *(jfloat*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_DOUBLE : { v->d = *(jdouble*)addr; break; }
    default: ShouldNotReachHere();
  }
}
1038 // helper function to invoke string primitive value callback
1039 // returns visit control flags
1040 static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
1041 CallbackWrapper* wrapper,
1042 oop str,
1043 void* user_data)
1044 {
1045 assert(str->klass() == SystemDictionary::String_klass(), "not a string");
1047 // get the string value and length
1048 // (string value may be offset from the base)
1049 int s_len = java_lang_String::length(str);
1050 typeArrayOop s_value = java_lang_String::value(str);
1051 int s_offset = java_lang_String::offset(str);
1052 jchar* value;
1053 if (s_len > 0) {
1054 value = s_value->char_at_addr(s_offset);
1055 } else {
1056 value = (jchar*) s_value->base(T_CHAR);
1057 }
1059 // invoke the callback
1060 return (*cb)(wrapper->klass_tag(),
1061 wrapper->obj_size(),
1062 wrapper->obj_tag_p(),
1063 value,
1064 (jint)s_len,
1065 user_data);
1066 }
1068 // helper function to invoke string primitive value callback
1069 // returns visit control flags
1070 static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
1071 CallbackWrapper* wrapper,
1072 oop obj,
1073 void* user_data)
1074 {
1075 assert(obj->is_typeArray(), "not a primitive array");
1077 // get base address of first element
1078 typeArrayOop array = typeArrayOop(obj);
1079 BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
1080 void* elements = array->base(type);
1082 // jvmtiPrimitiveType is defined so this mapping is always correct
1083 jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);
1085 return (*cb)(wrapper->klass_tag(),
1086 wrapper->obj_size(),
1087 wrapper->obj_tag_p(),
1088 (jint)array->length(),
1089 elem_type,
1090 elements,
1091 user_data);
1092 }
1094 // helper function to invoke the primitive field callback for all static fields
1095 // of a given class
1096 static jint invoke_primitive_field_callback_for_static_fields
1097 (CallbackWrapper* wrapper,
1098 oop obj,
1099 jvmtiPrimitiveFieldCallback cb,
1100 void* user_data)
1101 {
1102 // for static fields only the index will be set
1103 static jvmtiHeapReferenceInfo reference_info = { 0 };
1105 assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
1106 if (java_lang_Class::is_primitive(obj)) {
1107 return 0;
1108 }
1109 Klass* klass = java_lang_Class::as_Klass(obj);
1111 // ignore classes for object and type arrays
1112 if (!klass->oop_is_instance()) {
1113 return 0;
1114 }
1116 // ignore classes which aren't linked yet
1117 InstanceKlass* ik = InstanceKlass::cast(klass);
1118 if (!ik->is_linked()) {
1119 return 0;
1120 }
1122 // get the field map
1123 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
1125 // invoke the callback for each static primitive field
1126 for (int i=0; i<field_map->field_count(); i++) {
1127 ClassFieldDescriptor* field = field_map->field_at(i);
1129 // ignore non-primitive fields
1130 char type = field->field_type();
1131 if (!is_primitive_field_type(type)) {
1132 continue;
1133 }
1134 // one-to-one mapping
1135 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1137 // get offset and field value
1138 int offset = field->field_offset();
1139 address addr = (address)klass->java_mirror() + offset;
1140 jvalue value;
1141 copy_to_jvalue(&value, addr, value_type);
1143 // field index
1144 reference_info.field.index = field->field_index();
1146 // invoke the callback
1147 jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
1148 &reference_info,
1149 wrapper->klass_tag(),
1150 wrapper->obj_tag_p(),
1151 value,
1152 value_type,
1153 user_data);
1154 if (res & JVMTI_VISIT_ABORT) {
1155 delete field_map;
1156 return res;
1157 }
1158 }
1160 delete field_map;
1161 return 0;
1162 }
1164 // helper function to invoke the primitive field callback for all instance fields
1165 // of a given object
1166 static jint invoke_primitive_field_callback_for_instance_fields(
1167 CallbackWrapper* wrapper,
1168 oop obj,
1169 jvmtiPrimitiveFieldCallback cb,
1170 void* user_data)
1171 {
1172 // for instance fields only the index will be set
1173 static jvmtiHeapReferenceInfo reference_info = { 0 };
1175 // get the map of the instance fields
1176 ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);
1178 // invoke the callback for each instance primitive field
1179 for (int i=0; i<fields->field_count(); i++) {
1180 ClassFieldDescriptor* field = fields->field_at(i);
1182 // ignore non-primitive fields
1183 char type = field->field_type();
1184 if (!is_primitive_field_type(type)) {
1185 continue;
1186 }
1187 // one-to-one mapping
1188 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;
1190 // get offset and field value
1191 int offset = field->field_offset();
1192 address addr = (address)obj + offset;
1193 jvalue value;
1194 copy_to_jvalue(&value, addr, value_type);
1196 // field index
1197 reference_info.field.index = field->field_index();
1199 // invoke the callback
1200 jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
1201 &reference_info,
1202 wrapper->klass_tag(),
1203 wrapper->obj_tag_p(),
1204 value,
1205 value_type,
1206 user_data);
1207 if (res & JVMTI_VISIT_ABORT) {
1208 return res;
1209 }
1210 }
1211 return 0;
1212 }
1215 // VM operation to iterate over all objects in the heap (both reachable
1216 // and unreachable)
1217 class VM_HeapIterateOperation: public VM_Operation {
1218 private:
1219 ObjectClosure* _blk;
1220 public:
1221 VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }
1223 VMOp_Type type() const { return VMOp_HeapIterateOperation; }
1224 void doit() {
1225 // allows class files maps to be cached during iteration
1226 ClassFieldMapCacheMark cm;
1228 // make sure that heap is parsable (fills TLABs with filler objects)
1229 Universe::heap()->ensure_parsability(false); // no need to retire TLABs
1231 // Verify heap before iteration - if the heap gets corrupted then
1232 // JVMTI's IterateOverHeap will crash.
1233 if (VerifyBeforeIteration) {
1234 Universe::verify();
1235 }
1237 // do the iteration
1238 // If this operation encounters a bad object when using CMS,
1239 // consider using safe_object_iterate() which avoids perm gen
1240 // objects that may contain bad references.
1241 Universe::heap()->object_iterate(_blk);
1242 }
1244 };
// An ObjectClosure used to support the deprecated IterateOverHeap and
// IterateOverInstancesOfClass functions. Holds the agent's callback and
// the filter settings; do_object (defined below) applies them to each
// heap object in turn.
class IterateOverHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;                          // tag map of the requesting environment
  KlassHandle _klass;                             // optional klass filter (may be null)
  jvmtiHeapObjectFilter _object_filter;           // tagged/untagged object filter
  jvmtiHeapObjectCallback _heap_object_callback;  // agent callback to invoke per object
  const void* _user_data;                         // opaque agent data passed to the callback

  // accessors
  JvmtiTagMap* tag_map() const { return _tag_map; }
  jvmtiHeapObjectFilter object_filter() const { return _object_filter; }
  jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
  KlassHandle klass() const { return _klass; }
  const void* user_data() const { return _user_data; }

  // indicates if iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const { return _iteration_aborted; }
  void set_iteration_aborted(bool aborted) { _iteration_aborted = aborted; }

 public:
  IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
                               KlassHandle klass,
                               jvmtiHeapObjectFilter object_filter,
                               jvmtiHeapObjectCallback heap_object_callback,
                               const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _object_filter(object_filter),
    _heap_object_callback(heap_object_callback),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  // invoked for each object in the heap
  void do_object(oop o);
};
1287 // invoked for each object in the heap
1288 void IterateOverHeapObjectClosure::do_object(oop o) {
1289 // check if iteration has been halted
1290 if (is_iteration_aborted()) return;
1292 // ignore any objects that aren't visible to profiler
1293 if (!ServiceUtil::visible_oop(o)) return;
1295 // instanceof check when filtering by klass
1296 if (!klass().is_null() && !o->is_a(klass()())) {
1297 return;
1298 }
1299 // prepare for the calllback
1300 CallbackWrapper wrapper(tag_map(), o);
1302 // if the object is tagged and we're only interested in untagged objects
1303 // then don't invoke the callback. Similiarly, if the object is untagged
1304 // and we're only interested in tagged objects we skip the callback.
1305 if (wrapper.obj_tag() != 0) {
1306 if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
1307 } else {
1308 if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
1309 }
1311 // invoke the agent's callback
1312 jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
1313 wrapper.obj_size(),
1314 wrapper.obj_tag_p(),
1315 (void*)user_data());
1316 if (control == JVMTI_ITERATION_ABORT) {
1317 set_iteration_aborted(true);
1318 }
1319 }
// An ObjectClosure used to support the IterateThroughHeap function.
// Carries the heap filter bits and the full jvmtiHeapCallbacks structure;
// do_object (defined below) dispatches to the individual callbacks.
class IterateThroughHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;                   // tag map of the requesting environment
  KlassHandle _klass;                      // optional klass filter (may be null)
  int _heap_filter;                        // JVMTI_HEAP_FILTER_* bits
  const jvmtiHeapCallbacks* _callbacks;    // agent callbacks (any member may be NULL)
  const void* _user_data;                  // opaque agent data passed to callbacks

  // accessor functions
  JvmtiTagMap* tag_map() const { return _tag_map; }
  int heap_filter() const { return _heap_filter; }
  const jvmtiHeapCallbacks* callbacks() const { return _callbacks; }
  KlassHandle klass() const { return _klass; }
  const void* user_data() const { return _user_data; }

  // indicates if the iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const { return _iteration_aborted; }

  // used to check the visit control flags. If the abort flag is set
  // then we set the iteration aborted flag so that the iteration completes
  // without processing any further objects
  bool check_flags_for_abort(jint flags) {
    bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
    if (is_abort) {
      _iteration_aborted = true;
    }
    return is_abort;
  }

 public:
  IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
                                  KlassHandle klass,
                                  int heap_filter,
                                  const jvmtiHeapCallbacks* heap_callbacks,
                                  const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _heap_filter(heap_filter),
    _callbacks(heap_callbacks),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  // invoked for each object in the heap
  void do_object(oop o);
};
1370 // invoked for each object in the heap
1371 void IterateThroughHeapObjectClosure::do_object(oop obj) {
1372 // check if iteration has been halted
1373 if (is_iteration_aborted()) return;
1375 // ignore any objects that aren't visible to profiler
1376 if (!ServiceUtil::visible_oop(obj)) return;
1378 // apply class filter
1379 if (is_filtered_by_klass_filter(obj, klass())) return;
1381 // prepare for callback
1382 CallbackWrapper wrapper(tag_map(), obj);
1384 // check if filtered by the heap filter
1385 if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
1386 return;
1387 }
1389 // for arrays we need the length, otherwise -1
1390 bool is_array = obj->is_array();
1391 int len = is_array ? arrayOop(obj)->length() : -1;
1393 // invoke the object callback (if callback is provided)
1394 if (callbacks()->heap_iteration_callback != NULL) {
1395 jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
1396 jint res = (*cb)(wrapper.klass_tag(),
1397 wrapper.obj_size(),
1398 wrapper.obj_tag_p(),
1399 (jint)len,
1400 (void*)user_data());
1401 if (check_flags_for_abort(res)) return;
1402 }
1404 // for objects and classes we report primitive fields if callback provided
1405 if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
1406 jint res;
1407 jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
1408 if (obj->klass() == SystemDictionary::Class_klass()) {
1409 res = invoke_primitive_field_callback_for_static_fields(&wrapper,
1410 obj,
1411 cb,
1412 (void*)user_data());
1413 } else {
1414 res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
1415 obj,
1416 cb,
1417 (void*)user_data());
1418 }
1419 if (check_flags_for_abort(res)) return;
1420 }
1422 // string callback
1423 if (!is_array &&
1424 callbacks()->string_primitive_value_callback != NULL &&
1425 obj->klass() == SystemDictionary::String_klass()) {
1426 jint res = invoke_string_value_callback(
1427 callbacks()->string_primitive_value_callback,
1428 &wrapper,
1429 obj,
1430 (void*)user_data() );
1431 if (check_flags_for_abort(res)) return;
1432 }
1434 // array callback
1435 if (is_array &&
1436 callbacks()->array_primitive_value_callback != NULL &&
1437 obj->is_typeArray()) {
1438 jint res = invoke_array_primitive_value_callback(
1439 callbacks()->array_primitive_value_callback,
1440 &wrapper,
1441 obj,
1442 (void*)user_data() );
1443 if (check_flags_for_abort(res)) return;
1444 }
1445 };
1448 // Deprecated function to iterate over all objects in the heap
1449 void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
1450 KlassHandle klass,
1451 jvmtiHeapObjectCallback heap_object_callback,
1452 const void* user_data)
1453 {
1454 MutexLocker ml(Heap_lock);
1455 IterateOverHeapObjectClosure blk(this,
1456 klass,
1457 object_filter,
1458 heap_object_callback,
1459 user_data);
1460 VM_HeapIterateOperation op(&blk);
1461 VMThread::execute(&op);
1462 }
1465 // Iterates over all objects in the heap
1466 void JvmtiTagMap::iterate_through_heap(jint heap_filter,
1467 KlassHandle klass,
1468 const jvmtiHeapCallbacks* callbacks,
1469 const void* user_data)
1470 {
1471 MutexLocker ml(Heap_lock);
1472 IterateThroughHeapObjectClosure blk(this,
1473 klass,
1474 heap_filter,
1475 callbacks,
1476 user_data);
1477 VM_HeapIterateOperation op(&blk);
1478 VMThread::execute(&op);
1479 }
1481 // support class for get_objects_with_tags
1483 class TagObjectCollector : public JvmtiTagHashmapEntryClosure {
1484 private:
1485 JvmtiEnv* _env;
1486 jlong* _tags;
1487 jint _tag_count;
1489 GrowableArray<jobject>* _object_results; // collected objects (JNI weak refs)
1490 GrowableArray<uint64_t>* _tag_results; // collected tags
1492 public:
1493 TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) {
1494 _env = env;
1495 _tags = (jlong*)tags;
1496 _tag_count = tag_count;
1497 _object_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<jobject>(1,true);
1498 _tag_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<uint64_t>(1,true);
1499 }
1501 ~TagObjectCollector() {
1502 delete _object_results;
1503 delete _tag_results;
1504 }
1506 // for each tagged object check if the tag value matches
1507 // - if it matches then we create a JNI local reference to the object
1508 // and record the reference and tag value.
1509 //
1510 void do_entry(JvmtiTagHashmapEntry* entry) {
1511 for (int i=0; i<_tag_count; i++) {
1512 if (_tags[i] == entry->tag()) {
1513 oop o = entry->object();
1514 assert(o != NULL && Universe::heap()->is_in_reserved(o), "sanity check");
1515 jobject ref = JNIHandles::make_local(JavaThread::current(), o);
1516 _object_results->append(ref);
1517 _tag_results->append((uint64_t)entry->tag());
1518 }
1519 }
1520 }
1522 // return the results from the collection
1523 //
1524 jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1525 jvmtiError error;
1526 int count = _object_results->length();
1527 assert(count >= 0, "sanity check");
1529 // if object_result_ptr is not NULL then allocate the result and copy
1530 // in the object references.
1531 if (object_result_ptr != NULL) {
1532 error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
1533 if (error != JVMTI_ERROR_NONE) {
1534 return error;
1535 }
1536 for (int i=0; i<count; i++) {
1537 (*object_result_ptr)[i] = _object_results->at(i);
1538 }
1539 }
1541 // if tag_result_ptr is not NULL then allocate the result and copy
1542 // in the tag values.
1543 if (tag_result_ptr != NULL) {
1544 error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
1545 if (error != JVMTI_ERROR_NONE) {
1546 if (object_result_ptr != NULL) {
1547 _env->Deallocate((unsigned char*)object_result_ptr);
1548 }
1549 return error;
1550 }
1551 for (int i=0; i<count; i++) {
1552 (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
1553 }
1554 }
1556 *count_ptr = count;
1557 return JVMTI_ERROR_NONE;
1558 }
1559 };
1561 // return the list of objects with the specified tags
1562 jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
1563 jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1565 TagObjectCollector collector(env(), tags, count);
1566 {
1567 // iterate over all tagged objects
1568 MutexLocker ml(lock());
1569 entry_iterate(&collector);
1570 }
1571 return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
1572 }
1575 // ObjectMarker is used to support the marking objects when walking the
1576 // heap.
1577 //
1578 // This implementation uses the existing mark bits in an object for
1579 // marking. Objects that are marked must later have their headers restored.
1580 // As most objects are unlocked and don't have their identity hash computed
1581 // we don't have to save their headers. Instead we save the headers that
1582 // are "interesting". Later when the headers are restored this implementation
1583 // restores all headers to their initial value and then restores the few
1584 // objects that had interesting headers.
1585 //
1586 // Future work: This implementation currently uses growable arrays to save
1587 // the oop and header of interesting objects. As an optimization we could
1588 // use the same technique as the GC and make use of the unused area
1589 // between top() and end().
1590 //
1592 // An ObjectClosure used to restore the mark bits of an object
1593 class RestoreMarksClosure : public ObjectClosure {
1594 public:
1595 void do_object(oop o) {
1596 if (o != NULL) {
1597 markOop mark = o->mark();
1598 if (mark->is_marked()) {
1599 o->init_mark();
1600 }
1601 }
1602 }
1603 };
// ObjectMarker provides the mark and visited functions.
// See the block comment above: marking reuses the object header's mark
// bits; headers that cannot be trivially reconstructed are saved on the
// stacks below and restored by done().
class ObjectMarker : AllStatic {
 private:
  // saved headers (parallel stacks: _saved_oop_stack[i] owned _saved_mark_stack[i])
  static GrowableArray<oop>* _saved_oop_stack;
  static GrowableArray<markOop>* _saved_mark_stack;
  static bool _needs_reset; // do we need to reset mark bits?

 public:
  static void init(); // initialize
  static void done(); // clean-up

  static inline void mark(oop o); // mark an object
  static inline bool visited(oop o); // check if object has been visited

  static inline bool needs_reset() { return _needs_reset; }
  static inline void set_needs_reset(bool v) { _needs_reset = v; }
};
// ObjectMarker statics; the stacks are allocated by init() and freed by done()
GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
bool ObjectMarker::_needs_reset = true; // need to reset mark bits by default
1628 // initialize ObjectMarker - prepares for object marking
1629 void ObjectMarker::init() {
1630 assert(Thread::current()->is_VM_thread(), "must be VMThread");
1632 // prepare heap for iteration
1633 Universe::heap()->ensure_parsability(false); // no need to retire TLABs
1635 // create stacks for interesting headers
1636 _saved_mark_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<markOop>(4000, true);
1637 _saved_oop_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(4000, true);
1639 if (UseBiasedLocking) {
1640 BiasedLocking::preserve_marks();
1641 }
1642 }
1644 // Object marking is done so restore object headers
1645 void ObjectMarker::done() {
1646 // iterate over all objects and restore the mark bits to
1647 // their initial value
1648 RestoreMarksClosure blk;
1649 if (needs_reset()) {
1650 Universe::heap()->object_iterate(&blk);
1651 } else {
1652 // We don't need to reset mark bits on this call, but reset the
1653 // flag to the default for the next call.
1654 set_needs_reset(true);
1655 }
1657 // now restore the interesting headers
1658 for (int i = 0; i < _saved_oop_stack->length(); i++) {
1659 oop o = _saved_oop_stack->at(i);
1660 markOop mark = _saved_mark_stack->at(i);
1661 o->set_mark(mark);
1662 }
1664 if (UseBiasedLocking) {
1665 BiasedLocking::restore_marks();
1666 }
1668 // free the stacks
1669 delete _saved_oop_stack;
1670 delete _saved_mark_stack;
1671 }
1673 // mark an object
1674 inline void ObjectMarker::mark(oop o) {
1675 assert(Universe::heap()->is_in(o), "sanity check");
1676 assert(!o->mark()->is_marked(), "should only mark an object once");
1678 // object's mark word
1679 markOop mark = o->mark();
1681 if (mark->must_be_preserved(o)) {
1682 _saved_mark_stack->push(mark);
1683 _saved_oop_stack->push(o);
1684 }
1686 // mark the object
1687 o->set_mark(markOopDesc::prototype()->set_marked());
1688 }
1690 // return true if object is marked
1691 inline bool ObjectMarker::visited(oop o) {
1692 return o->mark()->is_marked();
1693 }
// Stack allocated class to help ensure that ObjectMarker is used
// correctly. Constructor initializes ObjectMarker, destructor calls
// ObjectMarker's done() function to restore object headers.
class ObjectMarkerController : public StackObj {
 public:
  ObjectMarkerController() {
    // prepares the heap and allocates the saved-header stacks
    ObjectMarker::init();
  }
  ~ObjectMarkerController() {
    // restores all object headers, even if the walk exited early
    ObjectMarker::done();
  }
};
1709 // helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
1710 // (not performance critical as only used for roots)
1711 static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
1712 switch (kind) {
1713 case JVMTI_HEAP_REFERENCE_JNI_GLOBAL: return JVMTI_HEAP_ROOT_JNI_GLOBAL;
1714 case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
1715 case JVMTI_HEAP_REFERENCE_MONITOR: return JVMTI_HEAP_ROOT_MONITOR;
1716 case JVMTI_HEAP_REFERENCE_STACK_LOCAL: return JVMTI_HEAP_ROOT_STACK_LOCAL;
1717 case JVMTI_HEAP_REFERENCE_JNI_LOCAL: return JVMTI_HEAP_ROOT_JNI_LOCAL;
1718 case JVMTI_HEAP_REFERENCE_THREAD: return JVMTI_HEAP_ROOT_THREAD;
1719 case JVMTI_HEAP_REFERENCE_OTHER: return JVMTI_HEAP_ROOT_OTHER;
1720 default: ShouldNotReachHere(); return JVMTI_HEAP_ROOT_OTHER;
1721 }
1722 }
1724 // Base class for all heap walk contexts. The base class maintains a flag
1725 // to indicate if the context is valid or not.
1726 class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
1727 private:
1728 bool _valid;
1729 public:
1730 HeapWalkContext(bool valid) { _valid = valid; }
1731 void invalidate() { _valid = false; }
1732 bool is_valid() const { return _valid; }
1733 };
// A basic heap walk context for the deprecated heap walking functions.
// The context for a basic heap walk are the callbacks and fields used by
// the referrer caching scheme.
class BasicHeapWalkContext: public HeapWalkContext {
 private:
  jvmtiHeapRootCallback _heap_root_callback;
  jvmtiStackReferenceCallback _stack_ref_callback;
  jvmtiObjectReferenceCallback _object_ref_callback;

  // used for caching the last referrer and its tag
  oop _last_referrer;
  jlong _last_referrer_tag;

 public:
  // creates an invalid context; must be replaced before use
  BasicHeapWalkContext() : HeapWalkContext(false) { }

  BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
                       jvmtiStackReferenceCallback stack_ref_callback,
                       jvmtiObjectReferenceCallback object_ref_callback) :
    HeapWalkContext(true),
    _heap_root_callback(heap_root_callback),
    _stack_ref_callback(stack_ref_callback),
    _object_ref_callback(object_ref_callback),
    _last_referrer(NULL),
    _last_referrer_tag(0) {
  }

  // accessors
  jvmtiHeapRootCallback heap_root_callback() const { return _heap_root_callback; }
  jvmtiStackReferenceCallback stack_ref_callback() const { return _stack_ref_callback; }
  jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback; }

  oop last_referrer() const { return _last_referrer; }
  void set_last_referrer(oop referrer) { _last_referrer = referrer; }
  jlong last_referrer_tag() const { return _last_referrer_tag; }
  void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
};
1773 // The advanced heap walk context for the FollowReferences functions.
1774 // The context is the callbacks, and the fields used for filtering.
1775 class AdvancedHeapWalkContext: public HeapWalkContext {
1776 private:
1777 jint _heap_filter;
1778 KlassHandle _klass_filter;
1779 const jvmtiHeapCallbacks* _heap_callbacks;
1781 public:
1782 AdvancedHeapWalkContext() : HeapWalkContext(false) { }
1784 AdvancedHeapWalkContext(jint heap_filter,
1785 KlassHandle klass_filter,
1786 const jvmtiHeapCallbacks* heap_callbacks) :
1787 HeapWalkContext(true),
1788 _heap_filter(heap_filter),
1789 _klass_filter(klass_filter),
1790 _heap_callbacks(heap_callbacks) {
1791 }
1793 // accessors
1794 jint heap_filter() const { return _heap_filter; }
1795 KlassHandle klass_filter() const { return _klass_filter; }
1797 const jvmtiHeapReferenceCallback heap_reference_callback() const {
1798 return _heap_callbacks->heap_reference_callback;
1799 };
1800 const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
1801 return _heap_callbacks->primitive_field_callback;
1802 }
1803 const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
1804 return _heap_callbacks->array_primitive_value_callback;
1805 }
1806 const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
1807 return _heap_callbacks->string_primitive_value_callback;
1808 }
1809 };
// The CallbackInvoker is a class with static functions that the heap walk can call
// into to invoke callbacks. It works in one of two modes. The "basic" mode is
// used for the deprecated IterateOverReachableObjects functions. The "advanced"
// mode is for the newer FollowReferences function which supports a lot of
// additional callbacks.
class CallbackInvoker : AllStatic {
 private:
  // heap walk styles
  enum { basic, advanced };
  static int _heap_walk_type;
  static bool is_basic_heap_walk() { return _heap_walk_type == basic; }
  static bool is_advanced_heap_walk() { return _heap_walk_type == advanced; }

  // context for basic style heap walk; the accessor asserts that the
  // matching initialize_for_* function was the last one called
  static BasicHeapWalkContext _basic_context;
  static BasicHeapWalkContext* basic_context() {
    assert(_basic_context.is_valid(), "invalid");
    return &_basic_context;
  }

  // context for advanced style heap walk
  static AdvancedHeapWalkContext _advanced_context;
  static AdvancedHeapWalkContext* advanced_context() {
    assert(_advanced_context.is_valid(), "invalid");
    return &_advanced_context;
  }

  // context needed for all heap walks
  static JvmtiTagMap* _tag_map;
  static const void* _user_data;
  static GrowableArray<oop>* _visit_stack;

  // accessors
  static JvmtiTagMap* tag_map() { return _tag_map; }
  static const void* user_data() { return _user_data; }
  static GrowableArray<oop>* visit_stack() { return _visit_stack; }

  // if the object hasn't been visited then push it onto the visit stack
  // so that it will be visited later; always returns true (continue walk)
  static inline bool check_for_visit(oop obj) {
    if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
    return true;
  }

  // invoke basic style callbacks
  static inline bool invoke_basic_heap_root_callback
    (jvmtiHeapRootKind root_kind, oop obj);
  static inline bool invoke_basic_stack_ref_callback
    (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
     int slot, oop obj);
  static inline bool invoke_basic_object_reference_callback
    (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // invoke advanced style callbacks
  static inline bool invoke_advanced_heap_root_callback
    (jvmtiHeapReferenceKind ref_kind, oop obj);
  static inline bool invoke_advanced_stack_ref_callback
    (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
     jmethodID method, jlocation bci, jint slot, oop obj);
  static inline bool invoke_advanced_object_reference_callback
    (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // used to report the value of primitive fields
  static inline bool report_primitive_field
    (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);

 public:
  // initialize for basic mode
  static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                             GrowableArray<oop>* visit_stack,
                                             const void* user_data,
                                             BasicHeapWalkContext context);

  // initialize for advanced mode
  static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                GrowableArray<oop>* visit_stack,
                                                const void* user_data,
                                                AdvancedHeapWalkContext context);

  // functions to report roots; each returns false to abort the walk
  static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
  static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID m, oop o);
  static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID method, jlocation bci, jint slot, oop o);

  // functions to report references
  static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
  static inline bool report_class_reference(oop referrer, oop referree);
  static inline bool report_class_loader_reference(oop referrer, oop referree);
  static inline bool report_signers_reference(oop referrer, oop referree);
  static inline bool report_protection_domain_reference(oop referrer, oop referree);
  static inline bool report_superclass_reference(oop referrer, oop referree);
  static inline bool report_interface_reference(oop referrer, oop referree);
  static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
  static inline bool report_primitive_array_values(oop array);
  static inline bool report_string_value(oop str);
  static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
  static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
};
// statics; set by the initialize_for_* functions before each heap walk
int CallbackInvoker::_heap_walk_type;
BasicHeapWalkContext CallbackInvoker::_basic_context;
AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
JvmtiTagMap* CallbackInvoker::_tag_map;
const void* CallbackInvoker::_user_data;
GrowableArray<oop>* CallbackInvoker::_visit_stack;
1922 // initialize for basic heap walk (IterateOverReachableObjects et al)
1923 void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1924 GrowableArray<oop>* visit_stack,
1925 const void* user_data,
1926 BasicHeapWalkContext context) {
1927 _tag_map = tag_map;
1928 _visit_stack = visit_stack;
1929 _user_data = user_data;
1930 _basic_context = context;
1931 _advanced_context.invalidate(); // will trigger assertion if used
1932 _heap_walk_type = basic;
1933 }
1935 // initialize for advanced heap walk (FollowReferences)
1936 void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1937 GrowableArray<oop>* visit_stack,
1938 const void* user_data,
1939 AdvancedHeapWalkContext context) {
1940 _tag_map = tag_map;
1941 _visit_stack = visit_stack;
1942 _user_data = user_data;
1943 _advanced_context = context;
1944 _basic_context.invalidate(); // will trigger assertion if used
1945 _heap_walk_type = advanced;
1946 }
1949 // invoke basic style heap root callback
1950 inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
1951 assert(ServiceUtil::visible_oop(obj), "checking");
1953 // if we heap roots should be reported
1954 jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
1955 if (cb == NULL) {
1956 return check_for_visit(obj);
1957 }
1959 CallbackWrapper wrapper(tag_map(), obj);
1960 jvmtiIterationControl control = (*cb)(root_kind,
1961 wrapper.klass_tag(),
1962 wrapper.obj_size(),
1963 wrapper.obj_tag_p(),
1964 (void*)user_data());
1965 // push root to visit stack when following references
1966 if (control == JVMTI_ITERATION_CONTINUE &&
1967 basic_context()->object_ref_callback() != NULL) {
1968 visit_stack()->push(obj);
1969 }
1970 return control != JVMTI_ITERATION_ABORT;
1971 }
1973 // invoke basic style stack ref callback
1974 inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
1975 jlong thread_tag,
1976 jint depth,
1977 jmethodID method,
1978 jint slot,
1979 oop obj) {
1980 assert(ServiceUtil::visible_oop(obj), "checking");
1982 // if we stack refs should be reported
1983 jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
1984 if (cb == NULL) {
1985 return check_for_visit(obj);
1986 }
1988 CallbackWrapper wrapper(tag_map(), obj);
1989 jvmtiIterationControl control = (*cb)(root_kind,
1990 wrapper.klass_tag(),
1991 wrapper.obj_size(),
1992 wrapper.obj_tag_p(),
1993 thread_tag,
1994 depth,
1995 method,
1996 slot,
1997 (void*)user_data());
1998 // push root to visit stack when following references
1999 if (control == JVMTI_ITERATION_CONTINUE &&
2000 basic_context()->object_ref_callback() != NULL) {
2001 visit_stack()->push(obj);
2002 }
2003 return control != JVMTI_ITERATION_ABORT;
2004 }
// invoke basic style object reference callback.
// Returns false only when the agent requests JVMTI_ITERATION_ABORT.
inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
                                                                    oop referrer,
                                                                    oop referree,
                                                                    jint index) {

  assert(ServiceUtil::visible_oop(referrer), "checking");
  assert(ServiceUtil::visible_oop(referree), "checking");

  BasicHeapWalkContext* context = basic_context();

  // callback requires the referrer's tag. If it's the same referrer
  // as the last call then we use the cached value.
  jlong referrer_tag;
  if (referrer == context->last_referrer()) {
    referrer_tag = context->last_referrer_tag();
  } else {
    referrer_tag = tag_for(tag_map(), referrer);
  }

  // do the callback
  CallbackWrapper wrapper(tag_map(), referree);
  jvmtiObjectReferenceCallback cb = context->object_ref_callback();
  jvmtiIterationControl control = (*cb)(ref_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        referrer_tag,
                                        index,
                                        (void*)user_data());

  // record referrer and referrer tag. For self-references record the
  // tag value from the callback as this might differ from referrer_tag.
  context->set_last_referrer(referrer);
  if (referrer == referree) {
    context->set_last_referrer_tag(*wrapper.obj_tag_p());
  } else {
    context->set_last_referrer_tag(referrer_tag);
  }

  // a continued reference also queues the referree for visiting
  if (control == JVMTI_ITERATION_CONTINUE) {
    return check_for_visit(referree);
  } else {
    return control != JVMTI_ITERATION_ABORT;
  }
}
// invoke advanced style heap root callback.
// Returns false only when the agent's result includes JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
                                                                oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback
  jint res  = (*cb)(ref_kind,
                    NULL, // referrer info (none for a heap root)
                    wrapper.klass_tag(),
                    0,    // referrer_class_tag is 0 for heap root
                    wrapper.obj_size(),
                    wrapper.obj_tag_p(),
                    NULL, // referrer_tag_p (none for a heap root)
                    len,
                    (void*)user_data());
  if (res & JVMTI_VISIT_ABORT) {
    return false;  // the agent requested that the walk terminate
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}
// report a reference from a thread stack to an object.
// Returns false only when the agent's result includes JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
                                                                jlong thread_tag,
                                                                jlong tid,
                                                                int depth,
                                                                jmethodID method,
                                                                jlocation bci,
                                                                jint slot,
                                                                oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // setup the referrer info (stack_local member of the union)
  jvmtiHeapReferenceInfo reference_info;
  reference_info.stack_local.thread_tag = thread_tag;
  reference_info.stack_local.thread_id = tid;
  reference_info.stack_local.depth = depth;
  reference_info.stack_local.method = method;
  reference_info.stack_local.location = bci;
  reference_info.stack_local.slot = slot;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // call into the agent
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  0,    // referrer_class_tag is 0 for heap root (stack)
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  NULL, // referrer_tag is 0 for root
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;  // the agent requested that the walk terminate
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}
// This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
// only for ref_kinds defined by the JVM TI spec to carry referrer info.
// For any other kind, NULL is passed instead.
#define REF_INFO_MASK  ((1 << JVMTI_HEAP_REFERENCE_FIELD)         \
                      | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)  \
                      | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
                      | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
                      | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)   \
                      | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
// invoke the object reference callback to report a reference.
// Returns false only when the agent's result includes JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
                                                                       oop referrer,
                                                                       oop obj,
                                                                       jint index)
{
  // field index is only valid field in reference_info
  // NOTE(review): a function-local static is only safe if heap walks are
  // single-threaded (at a safepoint) — confirm before reusing this pattern
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  assert(ServiceUtil::visible_oop(referrer), "checking");
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper (tags both referrer and referree)
  TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // field index is only valid field in reference_info
  reference_info.field.index = index;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback; reference_info is only passed for kinds in REF_INFO_MASK
  int res = (*cb)(ref_kind,
                  (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
                  wrapper.klass_tag(),
                  wrapper.referrer_klass_tag(),
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  wrapper.referrer_tag_p(),
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;  // the agent requested that the walk terminate
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}
2239 // report a "simple root"
2240 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
2241 assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
2242 kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
2243 assert(ServiceUtil::visible_oop(obj), "checking");
2245 if (is_basic_heap_walk()) {
2246 // map to old style root kind
2247 jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
2248 return invoke_basic_heap_root_callback(root_kind, obj);
2249 } else {
2250 assert(is_advanced_heap_walk(), "wrong heap walk type");
2251 return invoke_advanced_heap_root_callback(kind, obj);
2252 }
2253 }
2256 // invoke the primitive array values
2257 inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
2258 assert(obj->is_typeArray(), "not a primitive array");
2260 AdvancedHeapWalkContext* context = advanced_context();
2261 assert(context->array_primitive_value_callback() != NULL, "no callback");
2263 // apply class filter
2264 if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
2265 return true;
2266 }
2268 CallbackWrapper wrapper(tag_map(), obj);
2270 // apply tag filter
2271 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2272 wrapper.klass_tag(),
2273 context->heap_filter())) {
2274 return true;
2275 }
2277 // invoke the callback
2278 int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
2279 &wrapper,
2280 obj,
2281 (void*)user_data());
2282 return (!(res & JVMTI_VISIT_ABORT));
2283 }
2285 // invoke the string value callback
2286 inline bool CallbackInvoker::report_string_value(oop str) {
2287 assert(str->klass() == SystemDictionary::String_klass(), "not a string");
2289 AdvancedHeapWalkContext* context = advanced_context();
2290 assert(context->string_primitive_value_callback() != NULL, "no callback");
2292 // apply class filter
2293 if (is_filtered_by_klass_filter(str, context->klass_filter())) {
2294 return true;
2295 }
2297 CallbackWrapper wrapper(tag_map(), str);
2299 // apply tag filter
2300 if (is_filtered_by_heap_filter(wrapper.obj_tag(),
2301 wrapper.klass_tag(),
2302 context->heap_filter())) {
2303 return true;
2304 }
2306 // invoke the callback
2307 int res = invoke_string_value_callback(context->string_primitive_value_callback(),
2308 &wrapper,
2309 str,
2310 (void*)user_data());
2311 return (!(res & JVMTI_VISIT_ABORT));
2312 }
// invoke the primitive field callback for a single instance or static field.
// Returns false only when the agent's result includes JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
                                                    oop obj,
                                                    jint index,
                                                    address addr,
                                                    char type)
{
  // for primitive fields only the index will be set
  // NOTE(review): a function-local static is only safe if heap walks are
  // single-threaded (at a safepoint) — confirm before reusing this pattern
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->primitive_field_callback() != NULL, "no callback");

  // apply class filter (a filtered field is skipped, not an abort)
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // the field index in the referrer
  reference_info.field.index = index;

  // map the field-descriptor character onto the JVM TI primitive type
  jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

  // copy the raw field bytes into a jvalue of the right width
  jvalue value;
  copy_to_jvalue(&value, addr, value_type);

  jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  wrapper.obj_tag_p(),
                  value,
                  value_type,
                  (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}
2363 // instance field
2364 inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
2365 jint index,
2366 address value,
2367 char type) {
2368 return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
2369 obj,
2370 index,
2371 value,
2372 type);
2373 }
2375 // static field
2376 inline bool CallbackInvoker::report_primitive_static_field(oop obj,
2377 jint index,
2378 address value,
2379 char type) {
2380 return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
2381 obj,
2382 index,
2383 value,
2384 type);
2385 }
2387 // report a JNI local (root object) to the profiler
2388 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
2389 if (is_basic_heap_walk()) {
2390 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
2391 thread_tag,
2392 depth,
2393 m,
2394 -1,
2395 obj);
2396 } else {
2397 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
2398 thread_tag, tid,
2399 depth,
2400 m,
2401 (jlocation)-1,
2402 -1,
2403 obj);
2404 }
2405 }
2408 // report a local (stack reference, root object)
2409 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
2410 jlong tid,
2411 jint depth,
2412 jmethodID method,
2413 jlocation bci,
2414 jint slot,
2415 oop obj) {
2416 if (is_basic_heap_walk()) {
2417 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
2418 thread_tag,
2419 depth,
2420 method,
2421 slot,
2422 obj);
2423 } else {
2424 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
2425 thread_tag,
2426 tid,
2427 depth,
2428 method,
2429 bci,
2430 slot,
2431 obj);
2432 }
2433 }
2435 // report an object referencing a class.
2436 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
2437 if (is_basic_heap_walk()) {
2438 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2439 } else {
2440 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
2441 }
2442 }
2444 // report a class referencing its class loader.
2445 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
2446 if (is_basic_heap_walk()) {
2447 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2448 } else {
2449 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2450 }
2451 }
2453 // report a class referencing its signers.
2454 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
2455 if (is_basic_heap_walk()) {
2456 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
2457 } else {
2458 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
2459 }
2460 }
2462 // report a class referencing its protection domain..
2463 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
2464 if (is_basic_heap_walk()) {
2465 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2466 } else {
2467 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2468 }
2469 }
2471 // report a class referencing its superclass.
2472 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
2473 if (is_basic_heap_walk()) {
2474 // Send this to be consistent with past implementation
2475 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2476 } else {
2477 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
2478 }
2479 }
2481 // report a class referencing one of its interfaces.
2482 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
2483 if (is_basic_heap_walk()) {
2484 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
2485 } else {
2486 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
2487 }
2488 }
2490 // report a class referencing one of its static fields.
2491 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
2492 if (is_basic_heap_walk()) {
2493 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2494 } else {
2495 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2496 }
2497 }
2499 // report an array referencing an element object
2500 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
2501 if (is_basic_heap_walk()) {
2502 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2503 } else {
2504 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2505 }
2506 }
2508 // report an object referencing an instance field object
2509 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
2510 if (is_basic_heap_walk()) {
2511 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
2512 } else {
2513 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
2514 }
2515 }
2517 // report an array referencing an element object
2518 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
2519 if (is_basic_heap_walk()) {
2520 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2521 } else {
2522 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2523 }
2524 }
2526 // A supporting closure used to process simple roots
2527 class SimpleRootsClosure : public OopClosure {
2528 private:
2529 jvmtiHeapReferenceKind _kind;
2530 bool _continue;
2532 jvmtiHeapReferenceKind root_kind() { return _kind; }
2534 public:
2535 void set_kind(jvmtiHeapReferenceKind kind) {
2536 _kind = kind;
2537 _continue = true;
2538 }
2540 inline bool stopped() {
2541 return !_continue;
2542 }
2544 void do_oop(oop* obj_p) {
2545 // iteration has terminated
2546 if (stopped()) {
2547 return;
2548 }
2550 // ignore null or deleted handles
2551 oop o = *obj_p;
2552 if (o == NULL || o == JNIHandles::deleted_handle()) {
2553 return;
2554 }
2556 assert(Universe::heap()->is_in_reserved(o), "should be impossible");
2558 jvmtiHeapReferenceKind kind = root_kind();
2559 if (kind == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
2560 // SystemDictionary::always_strong_oops_do reports the application
2561 // class loader as a root. We want this root to be reported as
2562 // a root kind of "OTHER" rather than "SYSTEM_CLASS".
2563 if (!o->is_instanceMirror()) {
2564 kind = JVMTI_HEAP_REFERENCE_OTHER;
2565 }
2566 }
2568 // some objects are ignored - in the case of simple
2569 // roots it's mostly Symbol*s that we are skipping
2570 // here.
2571 if (!ServiceUtil::visible_oop(o)) {
2572 return;
2573 }
2575 // invoke the callback
2576 _continue = CallbackInvoker::report_simple_root(kind, o);
2578 }
2579 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2580 };
2582 // A supporting closure used to process JNI locals
2583 class JNILocalRootsClosure : public OopClosure {
2584 private:
2585 jlong _thread_tag;
2586 jlong _tid;
2587 jint _depth;
2588 jmethodID _method;
2589 bool _continue;
2590 public:
2591 void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
2592 _thread_tag = thread_tag;
2593 _tid = tid;
2594 _depth = depth;
2595 _method = method;
2596 _continue = true;
2597 }
2599 inline bool stopped() {
2600 return !_continue;
2601 }
2603 void do_oop(oop* obj_p) {
2604 // iteration has terminated
2605 if (stopped()) {
2606 return;
2607 }
2609 // ignore null or deleted handles
2610 oop o = *obj_p;
2611 if (o == NULL || o == JNIHandles::deleted_handle()) {
2612 return;
2613 }
2615 if (!ServiceUtil::visible_oop(o)) {
2616 return;
2617 }
2619 // invoke the callback
2620 _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
2621 }
2622 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
2623 };
// A VM operation to iterate over objects that are reachable from
// a set of roots or an initial object.
//
// For VM_HeapWalkOperation the set of roots used is :-
//
//   - All JNI global references
//   - All inflated monitors
//   - All classes loaded by the boot class loader (or all classes
//     in the event that class unloading is disabled)
//   - All java threads
//   - For each java thread then all locals and JNI local references
//     on the thread's execution stack
//   - All visible/explainable objects from Universes::oops_do
//
class VM_HeapWalkOperation: public VM_Operation {
 private:
  enum {
    initial_visit_stack_size = 4000    // initial capacity of the visit stack
  };

  bool _is_advanced_heap_walk;                      // indicates FollowReferences
  JvmtiTagMap* _tag_map;                            // tag map for this walk
  Handle _initial_object;                           // non-null for single-object walks
  GrowableArray<oop>* _visit_stack;                 // the visit stack

  bool _collecting_heap_roots;                      // are we collecting roots
  bool _following_object_refs;                      // are we following object references

  bool _reporting_primitive_fields;                 // optional reporting
  bool _reporting_primitive_array_values;
  bool _reporting_string_values;

  // C-heap allocated so it survives resource marks during the walk
  GrowableArray<oop>* create_visit_stack() {
    return new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(initial_visit_stack_size, true);
  }

  // accessors
  bool is_advanced_heap_walk() const               { return _is_advanced_heap_walk; }
  JvmtiTagMap* tag_map() const                     { return _tag_map; }
  Handle initial_object() const                    { return _initial_object; }

  bool is_following_references() const             { return _following_object_refs; }

  bool is_reporting_primitive_fields() const       { return _reporting_primitive_fields; }
  bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
  bool is_reporting_string_values() const          { return _reporting_string_values; }

  GrowableArray<oop>* visit_stack() const          { return _visit_stack; }

  // iterate over the various object types
  inline bool iterate_over_array(oop o);
  inline bool iterate_over_type_array(oop o);
  inline bool iterate_over_class(oop o);
  inline bool iterate_over_object(oop o);

  // root collection
  inline bool collect_simple_roots();
  inline bool collect_stack_roots();
  inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);

  // visit an object
  inline bool visit(oop o);

 public:
  // basic walk (IterateOverReachableObjects et al)
  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
                       Handle initial_object,
                       BasicHeapWalkContext callbacks,
                       const void* user_data);

  // advanced walk (FollowReferences)
  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
                       Handle initial_object,
                       AdvancedHeapWalkContext callbacks,
                       const void* user_data);

  ~VM_HeapWalkOperation();

  VMOp_Type type() const { return VMOp_HeapWalkOperation; }
  void doit();
};
2707 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2708 Handle initial_object,
2709 BasicHeapWalkContext callbacks,
2710 const void* user_data) {
2711 _is_advanced_heap_walk = false;
2712 _tag_map = tag_map;
2713 _initial_object = initial_object;
2714 _following_object_refs = (callbacks.object_ref_callback() != NULL);
2715 _reporting_primitive_fields = false;
2716 _reporting_primitive_array_values = false;
2717 _reporting_string_values = false;
2718 _visit_stack = create_visit_stack();
2721 CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2722 }
2724 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2725 Handle initial_object,
2726 AdvancedHeapWalkContext callbacks,
2727 const void* user_data) {
2728 _is_advanced_heap_walk = true;
2729 _tag_map = tag_map;
2730 _initial_object = initial_object;
2731 _following_object_refs = true;
2732 _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);;
2733 _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);;
2734 _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);;
2735 _visit_stack = create_visit_stack();
2737 CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2738 }
2740 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
2741 if (_following_object_refs) {
2742 assert(_visit_stack != NULL, "checking");
2743 delete _visit_stack;
2744 _visit_stack = NULL;
2745 }
2746 }
2748 // an array references its class and has a reference to
2749 // each element in the array
2750 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
2751 objArrayOop array = objArrayOop(o);
2753 // array reference to its class
2754 oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror();
2755 if (!CallbackInvoker::report_class_reference(o, mirror)) {
2756 return false;
2757 }
2759 // iterate over the array and report each reference to a
2760 // non-null element
2761 for (int index=0; index<array->length(); index++) {
2762 oop elem = array->obj_at(index);
2763 if (elem == NULL) {
2764 continue;
2765 }
2767 // report the array reference o[index] = elem
2768 if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
2769 return false;
2770 }
2771 }
2772 return true;
2773 }
2775 // a type array references its class
2776 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
2777 Klass* k = o->klass();
2778 oop mirror = k->java_mirror();
2779 if (!CallbackInvoker::report_class_reference(o, mirror)) {
2780 return false;
2781 }
2783 // report the array contents if required
2784 if (is_reporting_primitive_array_values()) {
2785 if (!CallbackInvoker::report_primitive_array_values(o)) {
2786 return false;
2787 }
2788 }
2789 return true;
2790 }
2792 // verify that a static oop field is in range
2793 static inline bool verify_static_oop(InstanceKlass* ik,
2794 oop mirror, int offset) {
2795 address obj_p = (address)mirror + offset;
2796 address start = (address)InstanceMirrorKlass::start_of_static_fields(mirror);
2797 address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
2798 assert(end >= start, "sanity check");
2800 if (obj_p >= start && obj_p < end) {
2801 return true;
2802 } else {
2803 return false;
2804 }
2805 }
// a class references its super class, interfaces, class loader, ...
// and finally its static fields
inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
  int i;
  Klass* klass = java_lang_Class::as_Klass(java_class);

  if (klass->oop_is_instance()) {
    InstanceKlass* ik = InstanceKlass::cast(klass);

    // ignore the class if it hasn't been linked yet
    if (!ik->is_linked()) {
      return true;
    }

    // get the java mirror
    oop mirror = klass->java_mirror();

    // super (only if something more interesting than java.lang.Object)
    Klass* java_super = ik->java_super();
    if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
      oop super = java_super->java_mirror();
      if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
        return false;
      }
    }

    // class loader
    oop cl = ik->class_loader();
    if (cl != NULL) {
      if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
        return false;
      }
    }

    // protection domain
    oop pd = ik->protection_domain();
    if (pd != NULL) {
      if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
        return false;
      }
    }

    // signers
    oop signers = ik->signers();
    if (signers != NULL) {
      if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
        return false;
      }
    }

    // references from the constant pool (index 0 is unused)
    // NOTE: the loop variable deliberately shadows the outer `i`
    {
      ConstantPool* const pool = ik->constants();
      for (int i = 1; i < pool->length(); i++) {
        constantTag tag = pool->tag_at(i).value();
        if (tag.is_string() || tag.is_klass()) {
          oop entry;
          if (tag.is_string()) {
            entry = pool->resolved_string_at(i);
            // If the entry is non-null it is resolved.
            if (entry == NULL) continue;
          } else {
            entry = pool->resolved_klass_at(i)->java_mirror();
          }
          if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
            return false;
          }
        }
      }
    }

    // interfaces
    // (These will already have been reported as references from the constant pool
    //  but are specified by IterateOverReachableObjects and must be reported).
    Array<Klass*>* interfaces = ik->local_interfaces();
    for (i = 0; i < interfaces->length(); i++) {
      oop interf = ((Klass*)interfaces->at(i))->java_mirror();
      if (interf == NULL) {
        continue;
      }
      if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
        return false;
      }
    }

    // iterate over the static fields; the map is heap allocated so it must be
    // deleted on every exit path below
    ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
    for (i=0; i<field_map->field_count(); i++) {
      ClassFieldDescriptor* field = field_map->field_at(i);
      char type = field->field_type();
      if (!is_primitive_field_type(type)) {
        // oop static field
        oop fld_o = mirror->obj_field(field->field_offset());
        assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
        if (fld_o != NULL) {
          int slot = field->field_index();
          if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
            delete field_map;
            return false;
          }
        }
      } else {
        // primitive static field, reported only if the agent asked for it
        if (is_reporting_primitive_fields()) {
          address addr = (address)mirror + field->field_offset();
          int slot = field->field_index();
          if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
            delete field_map;
            return false;
          }
        }
      }
    }
    delete field_map;

    return true;
  }

  return true;
}
// an object references a class and its instance fields
// (static fields are ignored here as we report these as
// references from the class).
// Returns false if a callback requested that the iteration terminate.
inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
  // reference to the class
  if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
    return false;
  }

  // iterate over instance fields
  // (the map is cached per class by JvmtiCachedClassFieldMap, so it is
  // not deleted here - contrast with the static-field map in
  // iterate_over_class which is created and deleted per call)
  ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      // reference field - report the referenced object (if any)
      oop fld_o = o->obj_field(field->field_offset());
      // ignore any objects that aren't visible to profiler
      if (fld_o != NULL && ServiceUtil::visible_oop(fld_o)) {
        assert(Universe::heap()->is_in_reserved(fld_o), "unsafe code should not "
               "have references to Klass* anymore");
        int slot = field->field_index();
        if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
          return false;
        }
      }
    } else {
      if (is_reporting_primitive_fields()) {
        // primitive instance field - report the raw value at its offset
        address addr = (address)o + field->field_offset();
        int slot = field->field_index();
        if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
          return false;
        }
      }
    }
  }

  // if the object is a java.lang.String
  if (is_reporting_string_values() &&
      o->klass() == SystemDictionary::String_klass()) {
    if (!CallbackInvoker::report_string_value(o)) {
      return false;
    }
  }
  return true;
}
// Collects all simple (non-stack) roots except for threads;
// threads are handled in collect_stack_roots() as an optimization.
// if there's a heap root callback provided then the callback is
// invoked for each simple root.
// if an object reference callback is provided then all simple
// roots are pushed onto the marking stack so that they can be
// processed later
//
// Returns false if a callback requested that the iteration terminate.
inline bool VM_HeapWalkOperation::collect_simple_roots() {
  SimpleRootsClosure blk;

  // JNI globals
  blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
  JNIHandles::oops_do(&blk);
  if (blk.stopped()) {
    // a callback asked to terminate the iteration
    return false;
  }

  // Preloaded classes and loader from the system dictionary
  blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
  SystemDictionary::always_strong_oops_do(&blk);
  KlassToOopClosure klass_blk(&blk);
  ClassLoaderDataGraph::always_strong_oops_do(&blk, &klass_blk, false);
  if (blk.stopped()) {
    return false;
  }

  // Inflated monitors
  blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
  ObjectSynchronizer::oops_do(&blk);
  if (blk.stopped()) {
    return false;
  }

  // threads are now handled in collect_stack_roots()

  // Other kinds of roots maintained by HotSpot
  // Many of these won't be visible but others (such as instances of important
  // exceptions) will be visible.
  blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
  Universe::oops_do(&blk);
  // NOTE: no stopped() check after the last two visits - any stop request
  // simply means no further roots are reported before we return true.

  // If there are any non-perm roots in the code cache, visit them.
  blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
  CodeBlobToOopClosure look_in_blobs(&blk, false);
  CodeCache::scavenge_root_nmethods_do(&look_in_blobs);

  return true;
}
// Walk the stack of a given thread and find all references (locals
// and JNI calls) and report these as stack references.
// java_thread - the thread whose stack is walked (must have a threadObj)
// blk         - closure used to report JNI locals for native frames
// Returns false if a callback requested that the iteration terminate.
inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
                                                      JNILocalRootsClosure* blk)
{
  oop threadObj = java_thread->threadObj();
  assert(threadObj != NULL, "sanity check");

  // only need to get the thread's tag once per thread
  jlong thread_tag = tag_for(_tag_map, threadObj);

  // also need the thread id
  jlong tid = java_lang_Thread::thread_id(threadObj);


  if (java_thread->has_last_Java_frame()) {

    // vframes are resource allocated
    Thread* current_thread = Thread::current();
    ResourceMark rm(current_thread);
    HandleMark hm(current_thread);

    RegisterMap reg_map(java_thread);
    frame f = java_thread->last_frame();
    vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);

    bool is_top_frame = true;
    int depth = 0;
    // entry frame whose JNI locals are reported when the corresponding
    // (native) javaVFrame is reached - see the externalVFrame case below
    frame* last_entry_frame = NULL;

    while (vf != NULL) {
      if (vf->is_java_frame()) {

        // java frame (interpreted, compiled, ...)
        javaVFrame *jvf = javaVFrame::cast(vf);

        // the jmethodID
        jmethodID method = jvf->method()->jmethod_id();

        if (!(jvf->method()->is_native())) {
          // non-native frame: report every non-null object-typed local
          jlocation bci = (jlocation)jvf->bci();
          StackValueCollection* locals = jvf->locals();
          for (int slot=0; slot<locals->size(); slot++) {
            if (locals->at(slot)->type() == T_OBJECT) {
              oop o = locals->obj_at(slot)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                   bci, slot, o)) {
                return false;
              }
            }
          }
        } else {
          // native method frame: report JNI locals instead of java locals
          blk->set_context(thread_tag, tid, depth, method);
          if (is_top_frame) {
            // JNI locals for the top frame.
            java_thread->active_handles()->oops_do(blk);
          } else {
            if (last_entry_frame != NULL) {
              // JNI locals for the entry frame
              assert(last_entry_frame->is_entry_frame(), "checking");
              last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
            }
          }
        }
        // a java frame consumes any pending entry frame
        last_entry_frame = NULL;
        depth++;
      } else {
        // externalVFrame - for an entry frame then we report the JNI locals
        // when we find the corresponding javaVFrame
        frame* fr = vf->frame_pointer();
        assert(fr != NULL, "sanity check");
        if (fr->is_entry_frame()) {
          last_entry_frame = fr;
        }
      }

      vf = vf->sender();
      is_top_frame = false;
    }
  } else {
    // no last java frame but there may be JNI locals
    blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
    java_thread->active_handles()->oops_do(blk);
  }
  return true;
}
3118 // Collects the simple roots for all threads and collects all
3119 // stack roots - for each thread it walks the execution
3120 // stack to find all references and local JNI refs.
3121 inline bool VM_HeapWalkOperation::collect_stack_roots() {
3122 JNILocalRootsClosure blk;
3123 for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
3124 oop threadObj = thread->threadObj();
3125 if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
3126 // Collect the simple root for this thread before we
3127 // collect its stack roots
3128 if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
3129 threadObj)) {
3130 return false;
3131 }
3132 if (!collect_stack_roots(thread, &blk)) {
3133 return false;
3134 }
3135 }
3136 }
3137 return true;
3138 }
3140 // visit an object
3141 // first mark the object as visited
3142 // second get all the outbound references from this object (in other words, all
3143 // the objects referenced by this object).
3144 //
3145 bool VM_HeapWalkOperation::visit(oop o) {
3146 // mark object as visited
3147 assert(!ObjectMarker::visited(o), "can't visit same object more than once");
3148 ObjectMarker::mark(o);
3150 // instance
3151 if (o->is_instance()) {
3152 if (o->klass() == SystemDictionary::Class_klass()) {
3153 if (!java_lang_Class::is_primitive(o)) {
3154 // a java.lang.Class
3155 return iterate_over_class(o);
3156 }
3157 } else {
3158 return iterate_over_object(o);
3159 }
3160 }
3162 // object array
3163 if (o->is_objArray()) {
3164 return iterate_over_array(o);
3165 }
3167 // type array
3168 if (o->is_typeArray()) {
3169 return iterate_over_type_array(o);
3170 }
3172 return true;
3173 }
3175 void VM_HeapWalkOperation::doit() {
3176 ResourceMark rm;
3177 ObjectMarkerController marker;
3178 ClassFieldMapCacheMark cm;
3180 assert(visit_stack()->is_empty(), "visit stack must be empty");
3182 // the heap walk starts with an initial object or the heap roots
3183 if (initial_object().is_null()) {
3184 // If either collect_stack_roots() or collect_simple_roots()
3185 // returns false at this point, then there are no mark bits
3186 // to reset.
3187 ObjectMarker::set_needs_reset(false);
3189 // Calling collect_stack_roots() before collect_simple_roots()
3190 // can result in a big performance boost for an agent that is
3191 // focused on analyzing references in the thread stacks.
3192 if (!collect_stack_roots()) return;
3194 if (!collect_simple_roots()) return;
3196 // no early return so enable heap traversal to reset the mark bits
3197 ObjectMarker::set_needs_reset(true);
3198 } else {
3199 visit_stack()->push(initial_object()());
3200 }
3202 // object references required
3203 if (is_following_references()) {
3205 // visit each object until all reachable objects have been
3206 // visited or the callback asked to terminate the iteration.
3207 while (!visit_stack()->is_empty()) {
3208 oop o = visit_stack()->pop();
3209 if (!ObjectMarker::visited(o)) {
3210 if (!visit(o)) {
3211 break;
3212 }
3213 }
3214 }
3215 }
3216 }
3218 // iterate over all objects that are reachable from a set of roots
3219 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
3220 jvmtiStackReferenceCallback stack_ref_callback,
3221 jvmtiObjectReferenceCallback object_ref_callback,
3222 const void* user_data) {
3223 MutexLocker ml(Heap_lock);
3224 BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
3225 VM_HeapWalkOperation op(this, Handle(), context, user_data);
3226 VMThread::execute(&op);
3227 }
3229 // iterate over all objects that are reachable from a given object
3230 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
3231 jvmtiObjectReferenceCallback object_ref_callback,
3232 const void* user_data) {
3233 oop obj = JNIHandles::resolve(object);
3234 Handle initial_object(Thread::current(), obj);
3236 MutexLocker ml(Heap_lock);
3237 BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
3238 VM_HeapWalkOperation op(this, initial_object, context, user_data);
3239 VMThread::execute(&op);
3240 }
3242 // follow references from an initial object or the GC roots
3243 void JvmtiTagMap::follow_references(jint heap_filter,
3244 KlassHandle klass,
3245 jobject object,
3246 const jvmtiHeapCallbacks* callbacks,
3247 const void* user_data)
3248 {
3249 oop obj = JNIHandles::resolve(object);
3250 Handle initial_object(Thread::current(), obj);
3252 MutexLocker ml(Heap_lock);
3253 AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
3254 VM_HeapWalkOperation op(this, initial_object, context, user_data);
3255 VMThread::execute(&op);
3256 }
3259 void JvmtiTagMap::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
3260 // No locks during VM bring-up (0 threads) and no safepoints after main
3261 // thread creation and before VMThread creation (1 thread); initial GC
3262 // verification can happen in that window which gets to here.
3263 assert(Threads::number_of_threads() <= 1 ||
3264 SafepointSynchronize::is_at_safepoint(),
3265 "must be executed at a safepoint");
3266 if (JvmtiEnv::environments_might_exist()) {
3267 JvmtiEnvIterator it;
3268 for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
3269 JvmtiTagMap* tag_map = env->tag_map();
3270 if (tag_map != NULL && !tag_map->is_empty()) {
3271 tag_map->do_weak_oops(is_alive, f);
3272 }
3273 }
3274 }
3275 }
3277 void JvmtiTagMap::do_weak_oops(BoolObjectClosure* is_alive, OopClosure* f) {
3279 // does this environment have the OBJECT_FREE event enabled
3280 bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);
3282 // counters used for trace message
3283 int freed = 0;
3284 int moved = 0;
3286 JvmtiTagHashmap* hashmap = this->hashmap();
3288 // reenable sizing (if disabled)
3289 hashmap->set_resizing_enabled(true);
3291 // if the hashmap is empty then we can skip it
3292 if (hashmap->_entry_count == 0) {
3293 return;
3294 }
3296 // now iterate through each entry in the table
3298 JvmtiTagHashmapEntry** table = hashmap->table();
3299 int size = hashmap->size();
3301 JvmtiTagHashmapEntry* delayed_add = NULL;
3303 for (int pos = 0; pos < size; ++pos) {
3304 JvmtiTagHashmapEntry* entry = table[pos];
3305 JvmtiTagHashmapEntry* prev = NULL;
3307 while (entry != NULL) {
3308 JvmtiTagHashmapEntry* next = entry->next();
3310 oop* obj = entry->object_addr();
3312 // has object been GC'ed
3313 if (!is_alive->do_object_b(entry->object())) {
3314 // grab the tag
3315 jlong tag = entry->tag();
3316 guarantee(tag != 0, "checking");
3318 // remove GC'ed entry from hashmap and return the
3319 // entry to the free list
3320 hashmap->remove(prev, pos, entry);
3321 destroy_entry(entry);
3323 // post the event to the profiler
3324 if (post_object_free) {
3325 JvmtiExport::post_object_free(env(), tag);
3326 }
3328 ++freed;
3329 } else {
3330 f->do_oop(entry->object_addr());
3331 oop new_oop = entry->object();
3333 // if the object has moved then re-hash it and move its
3334 // entry to its new location.
3335 unsigned int new_pos = JvmtiTagHashmap::hash(new_oop, size);
3336 if (new_pos != (unsigned int)pos) {
3337 if (prev == NULL) {
3338 table[pos] = next;
3339 } else {
3340 prev->set_next(next);
3341 }
3342 if (new_pos < (unsigned int)pos) {
3343 entry->set_next(table[new_pos]);
3344 table[new_pos] = entry;
3345 } else {
3346 // Delay adding this entry to it's new position as we'd end up
3347 // hitting it again during this iteration.
3348 entry->set_next(delayed_add);
3349 delayed_add = entry;
3350 }
3351 moved++;
3352 } else {
3353 // object didn't move
3354 prev = entry;
3355 }
3356 }
3358 entry = next;
3359 }
3360 }
3362 // Re-add all the entries which were kept aside
3363 while (delayed_add != NULL) {
3364 JvmtiTagHashmapEntry* next = delayed_add->next();
3365 unsigned int pos = JvmtiTagHashmap::hash(delayed_add->object(), size);
3366 delayed_add->set_next(table[pos]);
3367 table[pos] = delayed_add;
3368 delayed_add = next;
3369 }
3371 // stats
3372 if (TraceJVMTIObjectTagging) {
3373 int post_total = hashmap->_entry_count;
3374 int pre_total = post_total + freed;
3376 tty->print_cr("(%d->%d, %d freed, %d total moves)",
3377 pre_total, post_total, freed, moved);
3378 }
3379 }