Thu, 24 Jan 2013 22:13:32 -0800
8005128: JSR 292: the mlvm redefineClassInBootstrap test crashes in ConstantPool::compare_entry_to
Summary: When the constant pool is copied in merge_constant_pools, the invokedynamic operands must be copied first.
Reviewed-by: coleenp, twisti
Contributed-by: serguei.spitsyn@oracle.com
1 /*
2 * Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #include "precompiled.hpp"
26 #include "classfile/symbolTable.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "jvmtifiles/jvmtiEnv.hpp"
30 #include "oops/instanceMirrorKlass.hpp"
31 #include "oops/objArrayKlass.hpp"
32 #include "oops/oop.inline2.hpp"
33 #include "prims/jvmtiEventController.hpp"
34 #include "prims/jvmtiEventController.inline.hpp"
35 #include "prims/jvmtiExport.hpp"
36 #include "prims/jvmtiImpl.hpp"
37 #include "prims/jvmtiTagMap.hpp"
38 #include "runtime/biasedLocking.hpp"
39 #include "runtime/javaCalls.hpp"
40 #include "runtime/jniHandles.hpp"
41 #include "runtime/mutex.hpp"
42 #include "runtime/mutexLocker.hpp"
43 #include "runtime/reflectionUtils.hpp"
44 #include "runtime/vframe.hpp"
45 #include "runtime/vmThread.hpp"
46 #include "runtime/vm_operations.hpp"
47 #include "services/serviceUtil.hpp"
48 #ifndef SERIALGC
49 #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp"
50 #endif
52 // JvmtiTagHashmapEntry
53 //
54 // Each entry encapsulates a reference to the tagged object
55 // and the tag value. In addition an entry includes a next pointer which
56 // is used to chain entries together.
58 class JvmtiTagHashmapEntry : public CHeapObj<mtInternal> {
59 private:
60 friend class JvmtiTagMap;
62 oop _object; // tagged object
63 jlong _tag; // the tag
64 JvmtiTagHashmapEntry* _next; // next on the list
66 inline void init(oop object, jlong tag) {
67 _object = object;
68 _tag = tag;
69 _next = NULL;
70 }
72 // constructor
73 JvmtiTagHashmapEntry(oop object, jlong tag) { init(object, tag); }
75 public:
77 // accessor methods
78 inline oop object() const { return _object; }
79 inline oop* object_addr() { return &_object; }
80 inline jlong tag() const { return _tag; }
82 inline void set_tag(jlong tag) {
83 assert(tag != 0, "can't be zero");
84 _tag = tag;
85 }
87 inline JvmtiTagHashmapEntry* next() const { return _next; }
88 inline void set_next(JvmtiTagHashmapEntry* next) { _next = next; }
89 };
92 // JvmtiTagHashmap
93 //
94 // A hashmap is essentially a table of pointers to entries. Entries
95 // are hashed to a location, or position in the table, and then
96 // chained from that location. The "key" for hashing is address of
97 // the object, or oop. The "value" is the tag value.
98 //
99 // A hashmap maintains a count of the number entries in the hashmap
100 // and resizes if the number of entries exceeds a given threshold.
101 // The threshold is specified as a percentage of the size - for
102 // example a threshold of 0.75 will trigger the hashmap to resize
103 // if the number of entries is >75% of table size.
104 //
105 // A hashmap provides functions for adding, removing, and finding
106 // entries. It also provides a function to iterate over all entries
107 // in the hashmap.
109 class JvmtiTagHashmap : public CHeapObj<mtInternal> {
110 private:
111 friend class JvmtiTagMap;
113 enum {
114 small_trace_threshold = 10000, // threshold for tracing
115 medium_trace_threshold = 100000,
116 large_trace_threshold = 1000000,
117 initial_trace_threshold = small_trace_threshold
118 };
120 static int _sizes[]; // array of possible hashmap sizes
121 int _size; // actual size of the table
122 int _size_index; // index into size table
124 int _entry_count; // number of entries in the hashmap
126 float _load_factor; // load factor as a % of the size
127 int _resize_threshold; // computed threshold to trigger resizing.
128 bool _resizing_enabled; // indicates if hashmap can resize
130 int _trace_threshold; // threshold for trace messages
132 JvmtiTagHashmapEntry** _table; // the table of entries.
134 // private accessors
135 int resize_threshold() const { return _resize_threshold; }
136 int trace_threshold() const { return _trace_threshold; }
138 // initialize the hashmap
139 void init(int size_index=0, float load_factor=4.0f) {
140 int initial_size = _sizes[size_index];
141 _size_index = size_index;
142 _size = initial_size;
143 _entry_count = 0;
144 if (TraceJVMTIObjectTagging) {
145 _trace_threshold = initial_trace_threshold;
146 } else {
147 _trace_threshold = -1;
148 }
149 _load_factor = load_factor;
150 _resize_threshold = (int)(_load_factor * _size);
151 _resizing_enabled = true;
152 size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
153 _table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
154 if (_table == NULL) {
155 vm_exit_out_of_memory(s, "unable to allocate initial hashtable for jvmti object tags");
156 }
157 for (int i=0; i<initial_size; i++) {
158 _table[i] = NULL;
159 }
160 }
162 // hash a given key (oop) with the specified size
163 static unsigned int hash(oop key, int size) {
164 // shift right to get better distribution (as these bits will be zero
165 // with aligned addresses)
166 unsigned int addr = (unsigned int)((intptr_t)key);
167 #ifdef _LP64
168 return (addr >> 3) % size;
169 #else
170 return (addr >> 2) % size;
171 #endif
172 }
174 // hash a given key (oop)
175 unsigned int hash(oop key) {
176 return hash(key, _size);
177 }
179 // resize the hashmap - allocates a large table and re-hashes
180 // all entries into the new table.
181 void resize() {
182 int new_size_index = _size_index+1;
183 int new_size = _sizes[new_size_index];
184 if (new_size < 0) {
185 // hashmap already at maximum capacity
186 return;
187 }
189 // allocate new table
190 size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
191 JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
192 if (new_table == NULL) {
193 warning("unable to allocate larger hashtable for jvmti object tags");
194 set_resizing_enabled(false);
195 return;
196 }
198 // initialize new table
199 int i;
200 for (i=0; i<new_size; i++) {
201 new_table[i] = NULL;
202 }
204 // rehash all entries into the new table
205 for (i=0; i<_size; i++) {
206 JvmtiTagHashmapEntry* entry = _table[i];
207 while (entry != NULL) {
208 JvmtiTagHashmapEntry* next = entry->next();
209 oop key = entry->object();
210 assert(key != NULL, "jni weak reference cleared!!");
211 unsigned int h = hash(key, new_size);
212 JvmtiTagHashmapEntry* anchor = new_table[h];
213 if (anchor == NULL) {
214 new_table[h] = entry;
215 entry->set_next(NULL);
216 } else {
217 entry->set_next(anchor);
218 new_table[h] = entry;
219 }
220 entry = next;
221 }
222 }
224 // free old table and update settings.
225 os::free((void*)_table);
226 _table = new_table;
227 _size_index = new_size_index;
228 _size = new_size;
230 // compute new resize threshold
231 _resize_threshold = (int)(_load_factor * _size);
232 }
235 // internal remove function - remove an entry at a given position in the
236 // table.
237 inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
238 assert(pos >= 0 && pos < _size, "out of range");
239 if (prev == NULL) {
240 _table[pos] = entry->next();
241 } else {
242 prev->set_next(entry->next());
243 }
244 assert(_entry_count > 0, "checking");
245 _entry_count--;
246 }
248 // resizing switch
249 bool is_resizing_enabled() const { return _resizing_enabled; }
250 void set_resizing_enabled(bool enable) { _resizing_enabled = enable; }
252 // debugging
253 void print_memory_usage();
254 void compute_next_trace_threshold();
256 public:
258 // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
259 // The preferred size is rounded down to an actual size.
260 JvmtiTagHashmap(int size, float load_factor=0.0f) {
261 int i=0;
262 while (_sizes[i] < size) {
263 if (_sizes[i] < 0) {
264 assert(i > 0, "sanity check");
265 i--;
266 break;
267 }
268 i++;
269 }
271 // if a load factor is specified then use it, otherwise use default
272 if (load_factor > 0.01f) {
273 init(i, load_factor);
274 } else {
275 init(i);
276 }
277 }
279 // create a JvmtiTagHashmap with default settings
280 JvmtiTagHashmap() {
281 init();
282 }
284 // release table when JvmtiTagHashmap destroyed
285 ~JvmtiTagHashmap() {
286 if (_table != NULL) {
287 os::free((void*)_table);
288 _table = NULL;
289 }
290 }
292 // accessors
293 int size() const { return _size; }
294 JvmtiTagHashmapEntry** table() const { return _table; }
295 int entry_count() const { return _entry_count; }
297 // find an entry in the hashmap, returns NULL if not found.
298 inline JvmtiTagHashmapEntry* find(oop key) {
299 unsigned int h = hash(key);
300 JvmtiTagHashmapEntry* entry = _table[h];
301 while (entry != NULL) {
302 if (entry->object() == key) {
303 return entry;
304 }
305 entry = entry->next();
306 }
307 return NULL;
308 }
311 // add a new entry to hashmap
312 inline void add(oop key, JvmtiTagHashmapEntry* entry) {
313 assert(key != NULL, "checking");
314 assert(find(key) == NULL, "duplicate detected");
315 unsigned int h = hash(key);
316 JvmtiTagHashmapEntry* anchor = _table[h];
317 if (anchor == NULL) {
318 _table[h] = entry;
319 entry->set_next(NULL);
320 } else {
321 entry->set_next(anchor);
322 _table[h] = entry;
323 }
325 _entry_count++;
326 if (trace_threshold() > 0 && entry_count() >= trace_threshold()) {
327 assert(TraceJVMTIObjectTagging, "should only get here when tracing");
328 print_memory_usage();
329 compute_next_trace_threshold();
330 }
332 // if the number of entries exceed the threshold then resize
333 if (entry_count() > resize_threshold() && is_resizing_enabled()) {
334 resize();
335 }
336 }
338 // remove an entry with the given key.
339 inline JvmtiTagHashmapEntry* remove(oop key) {
340 unsigned int h = hash(key);
341 JvmtiTagHashmapEntry* entry = _table[h];
342 JvmtiTagHashmapEntry* prev = NULL;
343 while (entry != NULL) {
344 if (key == entry->object()) {
345 break;
346 }
347 prev = entry;
348 entry = entry->next();
349 }
350 if (entry != NULL) {
351 remove(prev, h, entry);
352 }
353 return entry;
354 }
356 // iterate over all entries in the hashmap
357 void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
358 };
360 // possible hashmap sizes - odd primes that roughly double in size.
361 // To avoid excessive resizing the odd primes from 4801-76831 and
362 // 76831-307261 have been removed. The list must be terminated by -1.
363 int JvmtiTagHashmap::_sizes[] = { 4801, 76831, 307261, 614563, 1228891,
364 2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 };
367 // A supporting class for iterating over all entries in Hashmap
368 class JvmtiTagHashmapEntryClosure {
369 public:
370 virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0;
371 };
374 // iterate over all entries in the hashmap
375 void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
376 for (int i=0; i<_size; i++) {
377 JvmtiTagHashmapEntry* entry = _table[i];
378 JvmtiTagHashmapEntry* prev = NULL;
379 while (entry != NULL) {
380 // obtain the next entry before invoking do_entry - this is
381 // necessary because do_entry may remove the entry from the
382 // hashmap.
383 JvmtiTagHashmapEntry* next = entry->next();
384 closure->do_entry(entry);
385 entry = next;
386 }
387 }
388 }
390 // debugging
391 void JvmtiTagHashmap::print_memory_usage() {
392 intptr_t p = (intptr_t)this;
393 tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p);
395 // table + entries in KB
396 int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) +
397 entry_count()*sizeof(JvmtiTagHashmapEntry))/K;
399 int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K);
400 tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]",
401 entry_count(), hashmap_usage, weak_globals_usage);
402 }
404 // compute threshold for the next trace message
405 void JvmtiTagHashmap::compute_next_trace_threshold() {
406 if (trace_threshold() < medium_trace_threshold) {
407 _trace_threshold += small_trace_threshold;
408 } else {
409 if (trace_threshold() < large_trace_threshold) {
410 _trace_threshold += medium_trace_threshold;
411 } else {
412 _trace_threshold += large_trace_threshold;
413 }
414 }
415 }
417 // create a JvmtiTagMap
418 JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
419 _env(env),
420 _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false),
421 _free_entries(NULL),
422 _free_entries_count(0)
423 {
424 assert(JvmtiThreadState_lock->is_locked(), "sanity check");
425 assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");
427 _hashmap = new JvmtiTagHashmap();
429 // finally add us to the environment
430 ((JvmtiEnvBase *)env)->set_tag_map(this);
431 }
434 // destroy a JvmtiTagMap
435 JvmtiTagMap::~JvmtiTagMap() {
437 // no lock acquired as we assume the enclosing environment is
438 // also being destroryed.
439 ((JvmtiEnvBase *)_env)->set_tag_map(NULL);
441 JvmtiTagHashmapEntry** table = _hashmap->table();
442 for (int j = 0; j < _hashmap->size(); j++) {
443 JvmtiTagHashmapEntry* entry = table[j];
444 while (entry != NULL) {
445 JvmtiTagHashmapEntry* next = entry->next();
446 delete entry;
447 entry = next;
448 }
449 }
451 // finally destroy the hashmap
452 delete _hashmap;
453 _hashmap = NULL;
455 // remove any entries on the free list
456 JvmtiTagHashmapEntry* entry = _free_entries;
457 while (entry != NULL) {
458 JvmtiTagHashmapEntry* next = entry->next();
459 delete entry;
460 entry = next;
461 }
462 _free_entries = NULL;
463 }
465 // create a hashmap entry
466 // - if there's an entry on the (per-environment) free list then this
467 // is returned. Otherwise an new entry is allocated.
468 JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(oop ref, jlong tag) {
469 assert(Thread::current()->is_VM_thread() || is_locked(), "checking");
470 JvmtiTagHashmapEntry* entry;
471 if (_free_entries == NULL) {
472 entry = new JvmtiTagHashmapEntry(ref, tag);
473 } else {
474 assert(_free_entries_count > 0, "mismatched _free_entries_count");
475 _free_entries_count--;
476 entry = _free_entries;
477 _free_entries = entry->next();
478 entry->init(ref, tag);
479 }
480 return entry;
481 }
483 // destroy an entry by returning it to the free list
484 void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
485 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
486 // limit the size of the free list
487 if (_free_entries_count >= max_free_entries) {
488 delete entry;
489 } else {
490 entry->set_next(_free_entries);
491 _free_entries = entry;
492 _free_entries_count++;
493 }
494 }
496 // returns the tag map for the given environments. If the tag map
497 // doesn't exist then it is created.
498 JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
499 JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map();
500 if (tag_map == NULL) {
501 MutexLocker mu(JvmtiThreadState_lock);
502 tag_map = ((JvmtiEnvBase*)env)->tag_map();
503 if (tag_map == NULL) {
504 tag_map = new JvmtiTagMap(env);
505 }
506 } else {
507 CHECK_UNHANDLED_OOPS_ONLY(Thread::current()->clear_unhandled_oops());
508 }
509 return tag_map;
510 }
512 // iterate over all entries in the tag map.
513 void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
514 hashmap()->entry_iterate(closure);
515 }
517 // returns true if the hashmaps are empty
518 bool JvmtiTagMap::is_empty() {
519 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
520 return hashmap()->entry_count() == 0;
521 }
524 // Return the tag value for an object, or 0 if the object is
525 // not tagged
526 //
527 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) {
528 JvmtiTagHashmapEntry* entry = tag_map->hashmap()->find(o);
529 if (entry == NULL) {
530 return 0;
531 } else {
532 return entry->tag();
533 }
534 }
537 // A CallbackWrapper is a support class for querying and tagging an object
538 // around a callback to a profiler. The constructor does pre-callback
539 // work to get the tag value, klass tag value, ... and the destructor
540 // does the post-callback work of tagging or untagging the object.
541 //
542 // {
543 // CallbackWrapper wrapper(tag_map, o);
544 //
545 // (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...)
546 //
547 // } // wrapper goes out of scope here which results in the destructor
548 // checking to see if the object has been tagged, untagged, or the
549 // tag value has changed.
550 //
551 class CallbackWrapper : public StackObj {
552 private:
553 JvmtiTagMap* _tag_map;
554 JvmtiTagHashmap* _hashmap;
555 JvmtiTagHashmapEntry* _entry;
556 oop _o;
557 jlong _obj_size;
558 jlong _obj_tag;
559 jlong _klass_tag;
561 protected:
562 JvmtiTagMap* tag_map() const { return _tag_map; }
564 // invoked post-callback to tag, untag, or update the tag of an object
565 void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
566 JvmtiTagHashmapEntry* entry, jlong obj_tag);
567 public:
568 CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
569 assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
570 "MT unsafe or must be VM thread");
572 // object to tag
573 _o = o;
575 // object size
576 _obj_size = (jlong)_o->size() * wordSize;
578 // record the context
579 _tag_map = tag_map;
580 _hashmap = tag_map->hashmap();
581 _entry = _hashmap->find(_o);
583 // get object tag
584 _obj_tag = (_entry == NULL) ? 0 : _entry->tag();
586 // get the class and the class's tag value
587 assert(SystemDictionary::Class_klass()->oop_is_instanceMirror(), "Is not?");
589 _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
590 }
592 ~CallbackWrapper() {
593 post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
594 }
596 inline jlong* obj_tag_p() { return &_obj_tag; }
597 inline jlong obj_size() const { return _obj_size; }
598 inline jlong obj_tag() const { return _obj_tag; }
599 inline jlong klass_tag() const { return _klass_tag; }
600 };
604 // callback post-callback to tag, untag, or update the tag of an object
605 void inline CallbackWrapper::post_callback_tag_update(oop o,
606 JvmtiTagHashmap* hashmap,
607 JvmtiTagHashmapEntry* entry,
608 jlong obj_tag) {
609 if (entry == NULL) {
610 if (obj_tag != 0) {
611 // callback has tagged the object
612 assert(Thread::current()->is_VM_thread(), "must be VMThread");
613 entry = tag_map()->create_entry(o, obj_tag);
614 hashmap->add(o, entry);
615 }
616 } else {
617 // object was previously tagged - the callback may have untagged
618 // the object or changed the tag value
619 if (obj_tag == 0) {
621 JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
622 assert(entry_removed == entry, "checking");
623 tag_map()->destroy_entry(entry);
625 } else {
626 if (obj_tag != entry->tag()) {
627 entry->set_tag(obj_tag);
628 }
629 }
630 }
631 }
633 // An extended CallbackWrapper used when reporting an object reference
634 // to the agent.
635 //
636 // {
637 // TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
638 //
639 // (*callback)(wrapper.klass_tag(),
640 // wrapper.obj_size(),
641 // wrapper.obj_tag_p()
642 // wrapper.referrer_tag_p(), ...)
643 //
644 // } // wrapper goes out of scope here which results in the destructor
645 // checking to see if the referrer object has been tagged, untagged,
646 // or the tag value has changed.
647 //
648 class TwoOopCallbackWrapper : public CallbackWrapper {
649 private:
650 bool _is_reference_to_self;
651 JvmtiTagHashmap* _referrer_hashmap;
652 JvmtiTagHashmapEntry* _referrer_entry;
653 oop _referrer;
654 jlong _referrer_obj_tag;
655 jlong _referrer_klass_tag;
656 jlong* _referrer_tag_p;
658 bool is_reference_to_self() const { return _is_reference_to_self; }
660 public:
661 TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
662 CallbackWrapper(tag_map, o)
663 {
664 // self reference needs to be handled in a special way
665 _is_reference_to_self = (referrer == o);
667 if (_is_reference_to_self) {
668 _referrer_klass_tag = klass_tag();
669 _referrer_tag_p = obj_tag_p();
670 } else {
671 _referrer = referrer;
672 // record the context
673 _referrer_hashmap = tag_map->hashmap();
674 _referrer_entry = _referrer_hashmap->find(_referrer);
676 // get object tag
677 _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
678 _referrer_tag_p = &_referrer_obj_tag;
680 // get referrer class tag.
681 _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
682 }
683 }
685 ~TwoOopCallbackWrapper() {
686 if (!is_reference_to_self()){
687 post_callback_tag_update(_referrer,
688 _referrer_hashmap,
689 _referrer_entry,
690 _referrer_obj_tag);
691 }
692 }
694 // address of referrer tag
695 // (for a self reference this will return the same thing as obj_tag_p())
696 inline jlong* referrer_tag_p() { return _referrer_tag_p; }
698 // referrer's class tag
699 inline jlong referrer_klass_tag() { return _referrer_klass_tag; }
700 };
702 // tag an object
703 //
704 // This function is performance critical. If many threads attempt to tag objects
705 // around the same time then it's possible that the Mutex associated with the
706 // tag map will be a hot lock.
707 void JvmtiTagMap::set_tag(jobject object, jlong tag) {
708 MutexLocker ml(lock());
710 // resolve the object
711 oop o = JNIHandles::resolve_non_null(object);
713 // see if the object is already tagged
714 JvmtiTagHashmap* hashmap = _hashmap;
715 JvmtiTagHashmapEntry* entry = hashmap->find(o);
717 // if the object is not already tagged then we tag it
718 if (entry == NULL) {
719 if (tag != 0) {
720 entry = create_entry(o, tag);
721 hashmap->add(o, entry);
722 } else {
723 // no-op
724 }
725 } else {
726 // if the object is already tagged then we either update
727 // the tag (if a new tag value has been provided)
728 // or remove the object if the new tag value is 0.
729 if (tag == 0) {
730 hashmap->remove(o);
731 destroy_entry(entry);
732 } else {
733 entry->set_tag(tag);
734 }
735 }
736 }
738 // get the tag for an object
739 jlong JvmtiTagMap::get_tag(jobject object) {
740 MutexLocker ml(lock());
742 // resolve the object
743 oop o = JNIHandles::resolve_non_null(object);
745 return tag_for(this, o);
746 }
749 // Helper class used to describe the static or instance fields of a class.
750 // For each field it holds the field index (as defined by the JVMTI specification),
751 // the field type, and the offset.
753 class ClassFieldDescriptor: public CHeapObj<mtInternal> {
754 private:
755 int _field_index;
756 int _field_offset;
757 char _field_type;
758 public:
759 ClassFieldDescriptor(int index, char type, int offset) :
760 _field_index(index), _field_type(type), _field_offset(offset) {
761 }
762 int field_index() const { return _field_index; }
763 char field_type() const { return _field_type; }
764 int field_offset() const { return _field_offset; }
765 };
767 class ClassFieldMap: public CHeapObj<mtInternal> {
768 private:
769 enum {
770 initial_field_count = 5
771 };
773 // list of field descriptors
774 GrowableArray<ClassFieldDescriptor*>* _fields;
776 // constructor
777 ClassFieldMap();
779 // add a field
780 void add(int index, char type, int offset);
782 // returns the field count for the given class
783 static int compute_field_count(instanceKlassHandle ikh);
785 public:
786 ~ClassFieldMap();
788 // access
789 int field_count() { return _fields->length(); }
790 ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }
792 // functions to create maps of static or instance fields
793 static ClassFieldMap* create_map_of_static_fields(Klass* k);
794 static ClassFieldMap* create_map_of_instance_fields(oop obj);
795 };
797 ClassFieldMap::ClassFieldMap() {
798 _fields = new (ResourceObj::C_HEAP, mtInternal)
799 GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
800 }
802 ClassFieldMap::~ClassFieldMap() {
803 for (int i=0; i<_fields->length(); i++) {
804 delete _fields->at(i);
805 }
806 delete _fields;
807 }
809 void ClassFieldMap::add(int index, char type, int offset) {
810 ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
811 _fields->append(field);
812 }
814 // Returns a heap allocated ClassFieldMap to describe the static fields
815 // of the given class.
816 //
817 ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
818 HandleMark hm;
819 instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), k);
821 // create the field map
822 ClassFieldMap* field_map = new ClassFieldMap();
824 FilteredFieldStream f(ikh, false, false);
825 int max_field_index = f.field_count()-1;
827 int index = 0;
828 for (FilteredFieldStream fld(ikh, true, true); !fld.eos(); fld.next(), index++) {
829 // ignore instance fields
830 if (!fld.access_flags().is_static()) {
831 continue;
832 }
833 field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
834 }
835 return field_map;
836 }
838 // Returns a heap allocated ClassFieldMap to describe the instance fields
839 // of the given class. All instance fields are included (this means public
840 // and private fields declared in superclasses and superinterfaces too).
841 //
842 ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
843 HandleMark hm;
844 instanceKlassHandle ikh = instanceKlassHandle(Thread::current(), obj->klass());
846 // create the field map
847 ClassFieldMap* field_map = new ClassFieldMap();
849 FilteredFieldStream f(ikh, false, false);
851 int max_field_index = f.field_count()-1;
853 int index = 0;
854 for (FilteredFieldStream fld(ikh, false, false); !fld.eos(); fld.next(), index++) {
855 // ignore static fields
856 if (fld.access_flags().is_static()) {
857 continue;
858 }
859 field_map->add(max_field_index - index, fld.signature()->byte_at(0), fld.offset());
860 }
862 return field_map;
863 }
865 // Helper class used to cache a ClassFileMap for the instance fields of
866 // a cache. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
867 // heap iteration and avoid creating a field map for each object in the heap
868 // (only need to create the map when the first instance of a class is encountered).
869 //
870 class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
871 private:
872 enum {
873 initial_class_count = 200
874 };
875 ClassFieldMap* _field_map;
877 ClassFieldMap* field_map() const { return _field_map; }
879 JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
880 ~JvmtiCachedClassFieldMap();
882 static GrowableArray<InstanceKlass*>* _class_list;
883 static void add_to_class_list(InstanceKlass* ik);
885 public:
886 // returns the field map for a given object (returning map cached
887 // by InstanceKlass if possible
888 static ClassFieldMap* get_map_of_instance_fields(oop obj);
890 // removes the field map from all instanceKlasses - should be
891 // called before VM operation completes
892 static void clear_cache();
894 // returns the number of ClassFieldMap cached by instanceKlasses
895 static int cached_field_map_count();
896 };
898 GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;
900 JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
901 _field_map = field_map;
902 }
904 JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
905 if (_field_map != NULL) {
906 delete _field_map;
907 }
908 }
910 // Marker class to ensure that the class file map cache is only used in a defined
911 // scope.
912 class ClassFieldMapCacheMark : public StackObj {
913 private:
914 static bool _is_active;
915 public:
916 ClassFieldMapCacheMark() {
917 assert(Thread::current()->is_VM_thread(), "must be VMThread");
918 assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
919 assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
920 _is_active = true;
921 }
922 ~ClassFieldMapCacheMark() {
923 JvmtiCachedClassFieldMap::clear_cache();
924 _is_active = false;
925 }
926 static bool is_active() { return _is_active; }
927 };
929 bool ClassFieldMapCacheMark::_is_active;
932 // record that the given InstanceKlass is caching a field map
933 void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
934 if (_class_list == NULL) {
935 _class_list = new (ResourceObj::C_HEAP, mtInternal)
936 GrowableArray<InstanceKlass*>(initial_class_count, true);
937 }
938 _class_list->push(ik);
939 }
941 // returns the instance field map for the given object
942 // (returns field map cached by the InstanceKlass if possible)
943 ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
944 assert(Thread::current()->is_VM_thread(), "must be VMThread");
945 assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");
947 Klass* k = obj->klass();
948 InstanceKlass* ik = InstanceKlass::cast(k);
950 // return cached map if possible
951 JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
952 if (cached_map != NULL) {
953 assert(cached_map->field_map() != NULL, "missing field list");
954 return cached_map->field_map();
955 } else {
956 ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
957 cached_map = new JvmtiCachedClassFieldMap(field_map);
958 ik->set_jvmti_cached_class_field_map(cached_map);
959 add_to_class_list(ik);
960 return field_map;
961 }
962 }
964 // remove the fields maps cached from all instanceKlasses
965 void JvmtiCachedClassFieldMap::clear_cache() {
966 assert(Thread::current()->is_VM_thread(), "must be VMThread");
967 if (_class_list != NULL) {
968 for (int i = 0; i < _class_list->length(); i++) {
969 InstanceKlass* ik = _class_list->at(i);
970 JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
971 assert(cached_map != NULL, "should not be NULL");
972 ik->set_jvmti_cached_class_field_map(NULL);
973 delete cached_map; // deletes the encapsulated field map
974 }
975 delete _class_list;
976 _class_list = NULL;
977 }
978 }
980 // returns the number of ClassFieldMap cached by instanceKlasses
981 int JvmtiCachedClassFieldMap::cached_field_map_count() {
982 return (_class_list == NULL) ? 0 : _class_list->length();
983 }
985 // helper function to indicate if an object is filtered by its tag or class tag
986 static inline bool is_filtered_by_heap_filter(jlong obj_tag,
987 jlong klass_tag,
988 int heap_filter) {
989 // apply the heap filter
990 if (obj_tag != 0) {
991 // filter out tagged objects
992 if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
993 } else {
994 // filter out untagged objects
995 if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
996 }
997 if (klass_tag != 0) {
998 // filter out objects with tagged classes
999 if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
1000 } else {
1001 // filter out objects with untagged classes.
1002 if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
1003 }
1004 return false;
1005 }
1007 // helper function to indicate if an object is filtered by a klass filter
1008 static inline bool is_filtered_by_klass_filter(oop obj, KlassHandle klass_filter) {
1009 if (!klass_filter.is_null()) {
1010 if (obj->klass() != klass_filter()) {
1011 return true;
1012 }
1013 }
1014 return false;
1015 }
// helper function to tell if a field is a primitive field or not
// ('L' introduces an object descriptor, '[' an array descriptor;
// every other signature character denotes a primitive type)
static inline bool is_primitive_field_type(char type) {
  return !(type == 'L' || type == '[');
}
// helper function to copy the value from location addr to jvalue.
// The caller guarantees that addr points at a field of the primitive
// type described by value_type; the value is stored in the matching
// union member of *v.
static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
  switch (value_type) {
    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_BYTE : { v->b = *(jbyte*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_CHAR : { v->c = *(jchar*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_SHORT : { v->s = *(jshort*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_INT : { v->i = *(jint*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_LONG : { v->j = *(jlong*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_FLOAT : { v->f = *(jfloat*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_DOUBLE : { v->d = *(jdouble*)addr; break; }
    default: ShouldNotReachHere();
  }
}
// helper function to invoke the string primitive value callback
// returns visit control flags
static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
                                         CallbackWrapper* wrapper,
                                         oop str,
                                         void* user_data)
{
  assert(str->klass() == SystemDictionary::String_klass(), "not a string");

  // get the string value and length
  // (string value may be offset from the base)
  int s_len = java_lang_String::length(str);
  typeArrayOop s_value = java_lang_String::value(str);
  int s_offset = java_lang_String::offset(str);
  jchar* value;
  if (s_len > 0) {
    value = s_value->char_at_addr(s_offset);
  } else {
    // zero-length string: any valid address will do, use the array base
    value = (jchar*) s_value->base(T_CHAR);
  }

  // invoke the callback
  return (*cb)(wrapper->klass_tag(),
               wrapper->obj_size(),
               wrapper->obj_tag_p(),
               value,
               (jint)s_len,
               user_data);
}
// helper function to invoke the array primitive value callback
// returns visit control flags
static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
                                                  CallbackWrapper* wrapper,
                                                  oop obj,
                                                  void* user_data)
{
  assert(obj->is_typeArray(), "not a primitive array");

  // get base address of first element
  typeArrayOop array = typeArrayOop(obj);
  BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
  void* elements = array->base(type);

  // jvmtiPrimitiveType is defined so this mapping is always correct
  jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);

  return (*cb)(wrapper->klass_tag(),
               wrapper->obj_size(),
               wrapper->obj_tag_p(),
               (jint)array->length(),
               elem_type,
               elements,
               user_data);
}
// helper function to invoke the primitive field callback for all static fields
// of a given class (obj is the java.lang.Class mirror).
// Returns the callback's visit control flags on abort, otherwise 0.
static jint invoke_primitive_field_callback_for_static_fields
  (CallbackWrapper* wrapper,
   oop obj,
   jvmtiPrimitiveFieldCallback cb,
   void* user_data)
{
  // for static fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
  // primitive classes (int.class etc.) have no static fields to report
  if (java_lang_Class::is_primitive(obj)) {
    return 0;
  }
  Klass* klass = java_lang_Class::as_Klass(obj);

  // ignore classes for object and type arrays
  if (!klass->oop_is_instance()) {
    return 0;
  }

  // ignore classes which aren't linked yet
  InstanceKlass* ik = InstanceKlass::cast(klass);
  if (!ik->is_linked()) {
    return 0;
  }

  // get the field map (heap-allocated; must be deleted on every exit path)
  ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);

  // invoke the callback for each static primitive field
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping between signature char and jvmtiPrimitiveType
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value (static fields live in the class mirror)
    int offset = field->field_offset();
    address addr = (address)klass->java_mirror() + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      // abort requested: free the map before propagating the flags
      delete field_map;
      return res;
    }
  }

  delete field_map;
  return 0;
}
// helper function to invoke the primitive field callback for all instance
// fields of a given object.
// Returns the callback's visit control flags on abort, otherwise 0.
static jint invoke_primitive_field_callback_for_instance_fields(
  CallbackWrapper* wrapper,
  oop obj,
  jvmtiPrimitiveFieldCallback cb,
  void* user_data)
{
  // for instance fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  // get the map of the instance fields (cached; do NOT delete it here)
  ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);

  // invoke the callback for each instance primitive field
  for (int i=0; i<fields->field_count(); i++) {
    ClassFieldDescriptor* field = fields->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping between signature char and jvmtiPrimitiveType
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value (instance fields live in the object itself)
    int offset = field->field_offset();
    address addr = (address)obj + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      return res;
    }
  }
  return 0;
}
// VM operation to iterate over all objects in the heap (both reachable
// and unreachable). Runs at a safepoint on the VMThread.
class VM_HeapIterateOperation: public VM_Operation {
 private:
  ObjectClosure* _blk;   // closure applied to every object
 public:
  VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }

  VMOp_Type type() const { return VMOp_HeapIterateOperation; }
  void doit() {
    // allows class files maps to be cached during iteration
    ClassFieldMapCacheMark cm;

    // make sure that heap is parsable (fills TLABs with filler objects)
    Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

    // Verify heap before iteration - if the heap gets corrupted then
    // JVMTI's IterateOverHeap will crash.
    if (VerifyBeforeIteration) {
      Universe::verify();
    }

    // do the iteration
    // If this operation encounters a bad object when using CMS,
    // consider using safe_object_iterate() which avoids perm gen
    // objects that may contain bad references.
    Universe::heap()->object_iterate(_blk);
  }

};
// An ObjectClosure used to support the deprecated IterateOverHeap and
// IterateOverInstancesOfClass functions
class IterateOverHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;
  KlassHandle _klass;                              // optional klass filter
  jvmtiHeapObjectFilter _object_filter;            // tagged/untagged filter
  jvmtiHeapObjectCallback _heap_object_callback;   // agent callback
  const void* _user_data;                          // opaque agent data

  // accessors
  JvmtiTagMap* tag_map() const { return _tag_map; }
  jvmtiHeapObjectFilter object_filter() const { return _object_filter; }
  jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
  KlassHandle klass() const { return _klass; }
  const void* user_data() const { return _user_data; }

  // indicates if iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const { return _iteration_aborted; }
  void set_iteration_aborted(bool aborted) { _iteration_aborted = aborted; }

 public:
  IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
                               KlassHandle klass,
                               jvmtiHeapObjectFilter object_filter,
                               jvmtiHeapObjectCallback heap_object_callback,
                               const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _object_filter(object_filter),
    _heap_object_callback(heap_object_callback),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  // invoked by the heap iterator for every object; defined below
  void do_object(oop o);
};
// invoked for each object in the heap
void IterateOverHeapObjectClosure::do_object(oop o) {
  // check if iteration has been halted
  if (is_iteration_aborted()) return;

  // ignore any objects that aren't visible to profiler
  if (!ServiceUtil::visible_oop(o)) return;

  // instanceof check when filtering by klass
  if (!klass().is_null() && !o->is_a(klass()())) {
    return;
  }
  // prepare for the callback (resolves the object's tag and class tag)
  CallbackWrapper wrapper(tag_map(), o);

  // if the object is tagged and we're only interested in untagged objects
  // then don't invoke the callback. Similarly, if the object is untagged
  // and we're only interested in tagged objects we skip the callback.
  if (wrapper.obj_tag() != 0) {
    if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
  } else {
    if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
  }

  // invoke the agent's callback
  jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
                                                       wrapper.obj_size(),
                                                       wrapper.obj_tag_p(),
                                                       (void*)user_data());
  if (control == JVMTI_ITERATION_ABORT) {
    set_iteration_aborted(true);
  }
}
// An ObjectClosure used to support the IterateThroughHeap function
class IterateThroughHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;
  KlassHandle _klass;                       // optional klass filter
  int _heap_filter;                         // JVMTI_HEAP_FILTER_* bit set
  const jvmtiHeapCallbacks* _callbacks;     // agent callback suite
  const void* _user_data;                   // opaque agent data

  // accessor functions
  JvmtiTagMap* tag_map() const { return _tag_map; }
  int heap_filter() const { return _heap_filter; }
  const jvmtiHeapCallbacks* callbacks() const { return _callbacks; }
  KlassHandle klass() const { return _klass; }
  const void* user_data() const { return _user_data; }

  // indicates if the iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const { return _iteration_aborted; }

  // used to check the visit control flags. If the abort flag is set
  // then we set the iteration aborted flag so that the iteration completes
  // without processing any further objects
  bool check_flags_for_abort(jint flags) {
    bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
    if (is_abort) {
      _iteration_aborted = true;
    }
    return is_abort;
  }

 public:
  IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
                                  KlassHandle klass,
                                  int heap_filter,
                                  const jvmtiHeapCallbacks* heap_callbacks,
                                  const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _heap_filter(heap_filter),
    _callbacks(heap_callbacks),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  // invoked by the heap iterator for every object; defined below
  void do_object(oop o);
};
1369 // invoked for each object in the heap
1370 void IterateThroughHeapObjectClosure::do_object(oop obj) {
1371 // check if iteration has been halted
1372 if (is_iteration_aborted()) return;
1374 // ignore any objects that aren't visible to profiler
1375 if (!ServiceUtil::visible_oop(obj)) return;
1377 // apply class filter
1378 if (is_filtered_by_klass_filter(obj, klass())) return;
1380 // prepare for callback
1381 CallbackWrapper wrapper(tag_map(), obj);
1383 // check if filtered by the heap filter
1384 if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
1385 return;
1386 }
1388 // for arrays we need the length, otherwise -1
1389 bool is_array = obj->is_array();
1390 int len = is_array ? arrayOop(obj)->length() : -1;
1392 // invoke the object callback (if callback is provided)
1393 if (callbacks()->heap_iteration_callback != NULL) {
1394 jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
1395 jint res = (*cb)(wrapper.klass_tag(),
1396 wrapper.obj_size(),
1397 wrapper.obj_tag_p(),
1398 (jint)len,
1399 (void*)user_data());
1400 if (check_flags_for_abort(res)) return;
1401 }
1403 // for objects and classes we report primitive fields if callback provided
1404 if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
1405 jint res;
1406 jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
1407 if (obj->klass() == SystemDictionary::Class_klass()) {
1408 res = invoke_primitive_field_callback_for_static_fields(&wrapper,
1409 obj,
1410 cb,
1411 (void*)user_data());
1412 } else {
1413 res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
1414 obj,
1415 cb,
1416 (void*)user_data());
1417 }
1418 if (check_flags_for_abort(res)) return;
1419 }
1421 // string callback
1422 if (!is_array &&
1423 callbacks()->string_primitive_value_callback != NULL &&
1424 obj->klass() == SystemDictionary::String_klass()) {
1425 jint res = invoke_string_value_callback(
1426 callbacks()->string_primitive_value_callback,
1427 &wrapper,
1428 obj,
1429 (void*)user_data() );
1430 if (check_flags_for_abort(res)) return;
1431 }
1433 // array callback
1434 if (is_array &&
1435 callbacks()->array_primitive_value_callback != NULL &&
1436 obj->is_typeArray()) {
1437 jint res = invoke_array_primitive_value_callback(
1438 callbacks()->array_primitive_value_callback,
1439 &wrapper,
1440 obj,
1441 (void*)user_data() );
1442 if (check_flags_for_abort(res)) return;
1443 }
1444 };
// Deprecated function to iterate over all objects in the heap
// (implements JVMTI IterateOverHeap / IterateOverInstancesOfClass)
void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
                                    KlassHandle klass,
                                    jvmtiHeapObjectCallback heap_object_callback,
                                    const void* user_data)
{
  // hold Heap_lock across the VM operation
  MutexLocker ml(Heap_lock);
  IterateOverHeapObjectClosure blk(this,
                                   klass,
                                   object_filter,
                                   heap_object_callback,
                                   user_data);
  VM_HeapIterateOperation op(&blk);
  VMThread::execute(&op);   // iteration happens at a safepoint
}
// Iterates over all objects in the heap
// (implements JVMTI IterateThroughHeap)
void JvmtiTagMap::iterate_through_heap(jint heap_filter,
                                       KlassHandle klass,
                                       const jvmtiHeapCallbacks* callbacks,
                                       const void* user_data)
{
  // hold Heap_lock across the VM operation
  MutexLocker ml(Heap_lock);
  IterateThroughHeapObjectClosure blk(this,
                                      klass,
                                      heap_filter,
                                      callbacks,
                                      user_data);
  VM_HeapIterateOperation op(&blk);
  VMThread::execute(&op);   // iteration happens at a safepoint
}
1480 // support class for get_objects_with_tags
1482 class TagObjectCollector : public JvmtiTagHashmapEntryClosure {
1483 private:
1484 JvmtiEnv* _env;
1485 jlong* _tags;
1486 jint _tag_count;
1488 GrowableArray<jobject>* _object_results; // collected objects (JNI weak refs)
1489 GrowableArray<uint64_t>* _tag_results; // collected tags
1491 public:
1492 TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) {
1493 _env = env;
1494 _tags = (jlong*)tags;
1495 _tag_count = tag_count;
1496 _object_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<jobject>(1,true);
1497 _tag_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<uint64_t>(1,true);
1498 }
1500 ~TagObjectCollector() {
1501 delete _object_results;
1502 delete _tag_results;
1503 }
1505 // for each tagged object check if the tag value matches
1506 // - if it matches then we create a JNI local reference to the object
1507 // and record the reference and tag value.
1508 //
1509 void do_entry(JvmtiTagHashmapEntry* entry) {
1510 for (int i=0; i<_tag_count; i++) {
1511 if (_tags[i] == entry->tag()) {
1512 oop o = entry->object();
1513 assert(o != NULL && Universe::heap()->is_in_reserved(o), "sanity check");
1514 jobject ref = JNIHandles::make_local(JavaThread::current(), o);
1515 _object_results->append(ref);
1516 _tag_results->append((uint64_t)entry->tag());
1517 }
1518 }
1519 }
1521 // return the results from the collection
1522 //
1523 jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {
1524 jvmtiError error;
1525 int count = _object_results->length();
1526 assert(count >= 0, "sanity check");
1528 // if object_result_ptr is not NULL then allocate the result and copy
1529 // in the object references.
1530 if (object_result_ptr != NULL) {
1531 error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
1532 if (error != JVMTI_ERROR_NONE) {
1533 return error;
1534 }
1535 for (int i=0; i<count; i++) {
1536 (*object_result_ptr)[i] = _object_results->at(i);
1537 }
1538 }
1540 // if tag_result_ptr is not NULL then allocate the result and copy
1541 // in the tag values.
1542 if (tag_result_ptr != NULL) {
1543 error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
1544 if (error != JVMTI_ERROR_NONE) {
1545 if (object_result_ptr != NULL) {
1546 _env->Deallocate((unsigned char*)object_result_ptr);
1547 }
1548 return error;
1549 }
1550 for (int i=0; i<count; i++) {
1551 (*tag_result_ptr)[i] = (jlong)_tag_results->at(i);
1552 }
1553 }
1555 *count_ptr = count;
1556 return JVMTI_ERROR_NONE;
1557 }
1558 };
// return the list of objects with the specified tags
// (implements JVMTI GetObjectsWithTags)
jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags,
  jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) {

  TagObjectCollector collector(env(), tags, count);
  {
    // iterate over all tagged objects; hold the tag map lock only for
    // the duration of the iteration
    MutexLocker ml(lock());
    entry_iterate(&collector);
  }
  // allocate and populate the caller-visible result arrays
  return collector.result(count_ptr, object_result_ptr, tag_result_ptr);
}
// ObjectMarker is used to support marking of objects when walking the
1575 // heap.
1576 //
1577 // This implementation uses the existing mark bits in an object for
1578 // marking. Objects that are marked must later have their headers restored.
1579 // As most objects are unlocked and don't have their identity hash computed
1580 // we don't have to save their headers. Instead we save the headers that
1581 // are "interesting". Later when the headers are restored this implementation
1582 // restores all headers to their initial value and then restores the few
1583 // objects that had interesting headers.
1584 //
1585 // Future work: This implementation currently uses growable arrays to save
1586 // the oop and header of interesting objects. As an optimization we could
1587 // use the same technique as the GC and make use of the unused area
1588 // between top() and end().
1589 //
1591 // An ObjectClosure used to restore the mark bits of an object
1592 class RestoreMarksClosure : public ObjectClosure {
1593 public:
1594 void do_object(oop o) {
1595 if (o != NULL) {
1596 markOop mark = o->mark();
1597 if (mark->is_marked()) {
1598 o->init_mark();
1599 }
1600 }
1601 }
1602 };
// ObjectMarker provides the mark and visited functions used during the
// heap walk. Marking uses the object header's mark bits; "interesting"
// headers are saved so they can be restored afterwards (see the comment
// block above).
class ObjectMarker : AllStatic {
 private:
  // saved headers
  static GrowableArray<oop>* _saved_oop_stack;       // objects whose headers were saved
  static GrowableArray<markOop>* _saved_mark_stack;  // the saved headers (parallel array)
  static bool _needs_reset;                  // do we need to reset mark bits?

 public:
  static void init();                       // initialize
  static void done();                       // clean-up

  static inline void mark(oop o);           // mark an object
  static inline bool visited(oop o);        // check if object has been visited

  static inline bool needs_reset()            { return _needs_reset; }
  static inline void set_needs_reset(bool v)  { _needs_reset = v; }
};
// ObjectMarker statics (allocated/freed by init()/done())
GrowableArray<oop>* ObjectMarker::_saved_oop_stack = NULL;
GrowableArray<markOop>* ObjectMarker::_saved_mark_stack = NULL;
bool ObjectMarker::_needs_reset = true; // need to reset mark bits by default
// initialize ObjectMarker - prepares for object marking
void ObjectMarker::init() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");

  // prepare heap for iteration
  Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

  // create stacks for interesting headers
  _saved_mark_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<markOop>(4000, true);
  _saved_oop_stack = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(4000, true);

  // biased headers must be preserved before we overwrite mark words
  if (UseBiasedLocking) {
    BiasedLocking::preserve_marks();
  }
}
// Object marking is done so restore object headers
void ObjectMarker::done() {
  // iterate over all objects and restore the mark bits to
  // their initial value
  RestoreMarksClosure blk;
  if (needs_reset()) {
    Universe::heap()->object_iterate(&blk);
  } else {
    // We don't need to reset mark bits on this call, but reset the
    // flag to the default for the next call.
    set_needs_reset(true);
  }

  // now restore the interesting headers (saved in mark())
  for (int i = 0; i < _saved_oop_stack->length(); i++) {
    oop o = _saved_oop_stack->at(i);
    markOop mark = _saved_mark_stack->at(i);
    o->set_mark(mark);
  }

  if (UseBiasedLocking) {
    BiasedLocking::restore_marks();
  }

  // free the stacks
  // NOTE(review): the pointers are not NULLed here, so ObjectMarker must
  // not be used again until init() is called — confirm callers honor this
  delete _saved_oop_stack;
  delete _saved_mark_stack;
}
// mark an object
inline void ObjectMarker::mark(oop o) {
  assert(Universe::heap()->is_in(o), "sanity check");
  assert(!o->mark()->is_marked(), "should only mark an object once");

  // object's mark word
  markOop mark = o->mark();

  // save the header only if it is "interesting" (most objects are unlocked
  // with no identity hash, so their headers need not be saved — see the
  // ObjectMarker comment block above)
  if (mark->must_be_preserved(o)) {
    _saved_mark_stack->push(mark);
    _saved_oop_stack->push(o);
  }

  // mark the object
  o->set_mark(markOopDesc::prototype()->set_marked());
}
1689 // return true if object is marked
1690 inline bool ObjectMarker::visited(oop o) {
1691 return o->mark()->is_marked();
1692 }
// Stack allocated class to help ensure that ObjectMarker is used
// correctly. Constructor initializes ObjectMarker, destructor calls
// ObjectMarker's done() function to restore object headers.
class ObjectMarkerController : public StackObj {
 public:
  ObjectMarkerController() {
    ObjectMarker::init();
  }
  ~ObjectMarkerController() {
    // restores headers even on early exits from the enclosing scope
    ObjectMarker::done();
  }
};
// helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
// (not performance critical as only used for roots)
static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
  switch (kind) {
    case JVMTI_HEAP_REFERENCE_JNI_GLOBAL: return JVMTI_HEAP_ROOT_JNI_GLOBAL;
    case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
    case JVMTI_HEAP_REFERENCE_MONITOR: return JVMTI_HEAP_ROOT_MONITOR;
    case JVMTI_HEAP_REFERENCE_STACK_LOCAL: return JVMTI_HEAP_ROOT_STACK_LOCAL;
    case JVMTI_HEAP_REFERENCE_JNI_LOCAL: return JVMTI_HEAP_ROOT_JNI_LOCAL;
    case JVMTI_HEAP_REFERENCE_THREAD: return JVMTI_HEAP_ROOT_THREAD;
    case JVMTI_HEAP_REFERENCE_OTHER: return JVMTI_HEAP_ROOT_OTHER;
    // unreachable; the return only silences compiler warnings
    default: ShouldNotReachHere(); return JVMTI_HEAP_ROOT_OTHER;
  }
}
1723 // Base class for all heap walk contexts. The base class maintains a flag
1724 // to indicate if the context is valid or not.
1725 class HeapWalkContext VALUE_OBJ_CLASS_SPEC {
1726 private:
1727 bool _valid;
1728 public:
1729 HeapWalkContext(bool valid) { _valid = valid; }
1730 void invalidate() { _valid = false; }
1731 bool is_valid() const { return _valid; }
1732 };
// A basic heap walk context for the deprecated heap walking functions.
// The context for a basic heap walk are the callbacks and fields used by
// the referrer caching scheme.
class BasicHeapWalkContext: public HeapWalkContext {
 private:
  jvmtiHeapRootCallback _heap_root_callback;
  jvmtiStackReferenceCallback _stack_ref_callback;
  jvmtiObjectReferenceCallback _object_ref_callback;

  // used for caching the most recently seen referrer and its tag
  oop _last_referrer;
  jlong _last_referrer_tag;

 public:
  // creates an invalid context; a valid one is installed via
  // CallbackInvoker::initialize_for_basic_heap_walk
  BasicHeapWalkContext() : HeapWalkContext(false) { }

  BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
                       jvmtiStackReferenceCallback stack_ref_callback,
                       jvmtiObjectReferenceCallback object_ref_callback) :
    HeapWalkContext(true),
    _heap_root_callback(heap_root_callback),
    _stack_ref_callback(stack_ref_callback),
    _object_ref_callback(object_ref_callback),
    _last_referrer(NULL),
    _last_referrer_tag(0) {
  }

  // accessors
  jvmtiHeapRootCallback heap_root_callback() const { return _heap_root_callback; }
  jvmtiStackReferenceCallback stack_ref_callback() const { return _stack_ref_callback; }
  jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback; }

  oop last_referrer() const { return _last_referrer; }
  void set_last_referrer(oop referrer) { _last_referrer = referrer; }
  jlong last_referrer_tag() const { return _last_referrer_tag; }
  void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
};
1772 // The advanced heap walk context for the FollowReferences functions.
1773 // The context is the callbacks, and the fields used for filtering.
1774 class AdvancedHeapWalkContext: public HeapWalkContext {
1775 private:
1776 jint _heap_filter;
1777 KlassHandle _klass_filter;
1778 const jvmtiHeapCallbacks* _heap_callbacks;
1780 public:
1781 AdvancedHeapWalkContext() : HeapWalkContext(false) { }
1783 AdvancedHeapWalkContext(jint heap_filter,
1784 KlassHandle klass_filter,
1785 const jvmtiHeapCallbacks* heap_callbacks) :
1786 HeapWalkContext(true),
1787 _heap_filter(heap_filter),
1788 _klass_filter(klass_filter),
1789 _heap_callbacks(heap_callbacks) {
1790 }
1792 // accessors
1793 jint heap_filter() const { return _heap_filter; }
1794 KlassHandle klass_filter() const { return _klass_filter; }
1796 const jvmtiHeapReferenceCallback heap_reference_callback() const {
1797 return _heap_callbacks->heap_reference_callback;
1798 };
1799 const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
1800 return _heap_callbacks->primitive_field_callback;
1801 }
1802 const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
1803 return _heap_callbacks->array_primitive_value_callback;
1804 }
1805 const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
1806 return _heap_callbacks->string_primitive_value_callback;
1807 }
1808 };
// The CallbackInvoker is a class with static functions that the heap walk can call
// into to invoke callbacks. It works in one of two modes. The "basic" mode is
// used for the deprecated IterateOverReachableObjects functions. The "advanced"
// mode is for the newer FollowReferences function which supports a lot of
// additional callbacks.
class CallbackInvoker : AllStatic {
 private:
  // heap walk styles
  enum { basic, advanced };
  static int _heap_walk_type;
  static bool is_basic_heap_walk()           { return _heap_walk_type == basic; }
  static bool is_advanced_heap_walk()        { return _heap_walk_type == advanced; }

  // context for basic style heap walk
  static BasicHeapWalkContext _basic_context;
  static BasicHeapWalkContext* basic_context() {
    assert(_basic_context.is_valid(), "invalid");
    return &_basic_context;
  }

  // context for advanced style heap walk
  static AdvancedHeapWalkContext _advanced_context;
  static AdvancedHeapWalkContext* advanced_context() {
    assert(_advanced_context.is_valid(), "invalid");
    return &_advanced_context;
  }

  // context needed for all heap walks
  static JvmtiTagMap* _tag_map;
  static const void* _user_data;
  static GrowableArray<oop>* _visit_stack;

  // accessors
  static JvmtiTagMap* tag_map()                        { return _tag_map; }
  static const void* user_data()                       { return _user_data; }
  static GrowableArray<oop>* visit_stack()             { return _visit_stack; }

  // if the object hasn't been visited then push it onto the visit stack
  // so that it will be visited later
  static inline bool check_for_visit(oop obj) {
    if (!ObjectMarker::visited(obj)) visit_stack()->push(obj);
    return true;
  }

  // invoke basic style callbacks
  static inline bool invoke_basic_heap_root_callback
    (jvmtiHeapRootKind root_kind, oop obj);
  static inline bool invoke_basic_stack_ref_callback
    (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
     int slot, oop obj);
  static inline bool invoke_basic_object_reference_callback
    (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // invoke advanced style callbacks
  static inline bool invoke_advanced_heap_root_callback
    (jvmtiHeapReferenceKind ref_kind, oop obj);
  static inline bool invoke_advanced_stack_ref_callback
    (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
     jmethodID method, jlocation bci, jint slot, oop obj);
  static inline bool invoke_advanced_object_reference_callback
    (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // used to report the value of primitive fields
  static inline bool report_primitive_field
    (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);

 public:
  // initialize for basic mode
  static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                             GrowableArray<oop>* visit_stack,
                                             const void* user_data,
                                             BasicHeapWalkContext context);

  // initialize for advanced mode
  static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                GrowableArray<oop>* visit_stack,
                                                const void* user_data,
                                                AdvancedHeapWalkContext context);

  // functions to report roots
  static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
  static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID m, oop o);
  static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID method, jlocation bci, jint slot, oop o);

  // functions to report references
  static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
  static inline bool report_class_reference(oop referrer, oop referree);
  static inline bool report_class_loader_reference(oop referrer, oop referree);
  static inline bool report_signers_reference(oop referrer, oop referree);
  static inline bool report_protection_domain_reference(oop referrer, oop referree);
  static inline bool report_superclass_reference(oop referrer, oop referree);
  static inline bool report_interface_reference(oop referrer, oop referree);
  static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
  static inline bool report_primitive_array_values(oop array);
  static inline bool report_string_value(oop str);
  static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
  static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
};
// statics (set up by the initialize_for_* functions below)
int CallbackInvoker::_heap_walk_type;
BasicHeapWalkContext CallbackInvoker::_basic_context;
AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
JvmtiTagMap* CallbackInvoker::_tag_map;
const void* CallbackInvoker::_user_data;
GrowableArray<oop>* CallbackInvoker::_visit_stack;
1921 // initialize for basic heap walk (IterateOverReachableObjects et al)
1922 void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
1923 GrowableArray<oop>* visit_stack,
1924 const void* user_data,
1925 BasicHeapWalkContext context) {
1926 _tag_map = tag_map;
1927 _visit_stack = visit_stack;
1928 _user_data = user_data;
1929 _basic_context = context;
1930 _advanced_context.invalidate(); // will trigger assertion if used
1931 _heap_walk_type = basic;
1932 }
1934 // initialize for advanced heap walk (FollowReferences)
1935 void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
1936 GrowableArray<oop>* visit_stack,
1937 const void* user_data,
1938 AdvancedHeapWalkContext context) {
1939 _tag_map = tag_map;
1940 _visit_stack = visit_stack;
1941 _user_data = user_data;
1942 _advanced_context = context;
1943 _basic_context.invalidate(); // will trigger assertion if used
1944 _heap_walk_type = advanced;
1945 }
// invoke basic style heap root callback
// Returns false iff the agent requested abort; when no heap-root callback
// is registered the root is simply queued for visiting (if following refs).
inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  // check whether heap roots should be reported at all
  jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // CallbackWrapper gives the agent access to the object's tag (and
  // writes back any tag change when it is destroyed).
  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}
// invoke basic style stack ref callback
// Reports a local/JNI-local root on a thread stack to the agent.
// Returns false iff the agent requested abort.
inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
                                                             jlong thread_tag,
                                                             jint depth,
                                                             jmethodID method,
                                                             jint slot,
                                                             oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  // check whether stack refs should be reported at all
  jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        thread_tag,
                                        depth,
                                        method,
                                        slot,
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}
// invoke basic style object reference callback
// Reports the reference referrer -> referree (at the given index for array
// elements / fields) to the agent.  Returns false iff the agent aborted.
inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
                                                                    oop referrer,
                                                                    oop referree,
                                                                    jint index) {

  assert(ServiceUtil::visible_oop(referrer), "checking");
  assert(ServiceUtil::visible_oop(referree), "checking");

  BasicHeapWalkContext* context = basic_context();

  // callback requires the referrer's tag. If it's the same referrer
  // as the last call then we use the cached value.
  jlong referrer_tag;
  if (referrer == context->last_referrer()) {
    referrer_tag = context->last_referrer_tag();
  } else {
    referrer_tag = tag_for(tag_map(), referrer);
  }

  // do the callback
  CallbackWrapper wrapper(tag_map(), referree);
  jvmtiObjectReferenceCallback cb = context->object_ref_callback();
  jvmtiIterationControl control = (*cb)(ref_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        referrer_tag,
                                        index,
                                        (void*)user_data());

  // record referrer and referrer tag. For self-references record the
  // tag value from the callback as this might differ from referrer_tag.
  context->set_last_referrer(referrer);
  if (referrer == referree) {
    context->set_last_referrer_tag(*wrapper.obj_tag_p());
  } else {
    context->set_last_referrer_tag(referrer_tag);
  }

  // CONTINUE means the referree may still need visiting; otherwise only
  // ABORT stops the walk (IGNORE just skips this object's references).
  if (control == JVMTI_ITERATION_CONTINUE) {
    return check_for_visit(referree);
  } else {
    return control != JVMTI_ITERATION_ABORT;
  }
}
// invoke advanced style heap root callback
// Applies the class and tag filters before calling the agent.  Returns
// false iff the agent set JVMTI_VISIT_ABORT in its result.
inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind,
                                                                oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback
  jint res = (*cb)(ref_kind,
                   NULL, // referrer info
                   wrapper.klass_tag(),
                   0,    // referrer_class_tag is 0 for heap root
                   wrapper.obj_size(),
                   wrapper.obj_tag_p(),
                   NULL, // referrer_tag_p
                   len,
                   (void*)user_data());
  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}
// report a reference from a thread stack to an object
// Fills in a jvmtiHeapReferenceInfo describing the stack location (thread,
// frame depth, method, bci, slot) and calls the agent's heap reference
// callback.  Returns false iff the agent set JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind,
                                                                jlong thread_tag,
                                                                jlong tid,
                                                                int depth,
                                                                jmethodID method,
                                                                jlocation bci,
                                                                jint slot,
                                                                oop obj) {
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper
  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // setup the referrer info
  jvmtiHeapReferenceInfo reference_info;
  reference_info.stack_local.thread_tag = thread_tag;
  reference_info.stack_local.thread_id = tid;
  reference_info.stack_local.depth = depth;
  reference_info.stack_local.method = method;
  reference_info.stack_local.location = bci;
  reference_info.stack_local.slot = slot;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // call into the agent
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  0,    // referrer_class_tag is 0 for heap root (stack)
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  NULL, // referrer_tag is 0 for root
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}
// This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
// only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
// (One bit per jvmtiHeapReferenceKind that has meaningful reference info.)
#define REF_INFO_MASK  ((1 << JVMTI_HEAP_REFERENCE_FIELD)         \
                      | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD)  \
                      | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
                      | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \
                      | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL)   \
                      | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL))
// invoke the object reference callback to report a reference
// Reports referrer -> obj with the given kind; reference_info carries only
// the field/array index and is passed only for kinds in REF_INFO_MASK.
// Returns false iff the agent set JVMTI_VISIT_ABORT.
inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind,
                                                                       oop referrer,
                                                                       oop obj,
                                                                       jint index)
{
  // field index is only valid field in reference_info
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  assert(ServiceUtil::visible_oop(referrer), "checking");
  assert(ServiceUtil::visible_oop(obj), "checking");

  AdvancedHeapWalkContext* context = advanced_context();

  // check that callback is provided
  jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return check_for_visit(obj);
  }

  // setup the callback wrapper (manages tags of both referrer and obj)
  TwoOopCallbackWrapper wrapper(tag_map(), referrer, obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return check_for_visit(obj);
  }

  // field index is only valid field in reference_info
  reference_info.field.index = index;

  // for arrays we need the length, otherwise -1
  jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1);

  // invoke the callback
  int res = (*cb)(ref_kind,
                  (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
                  wrapper.klass_tag(),
                  wrapper.referrer_klass_tag(),
                  wrapper.obj_size(),
                  wrapper.obj_tag_p(),
                  wrapper.referrer_tag_p(),
                  len,
                  (void*)user_data());

  if (res & JVMTI_VISIT_ABORT) {
    return false;
  }
  if (res & JVMTI_VISIT_OBJECTS) {
    check_for_visit(obj);
  }
  return true;
}
2238 // report a "simple root"
2239 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) {
2240 assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL &&
2241 kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root");
2242 assert(ServiceUtil::visible_oop(obj), "checking");
2244 if (is_basic_heap_walk()) {
2245 // map to old style root kind
2246 jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind);
2247 return invoke_basic_heap_root_callback(root_kind, obj);
2248 } else {
2249 assert(is_advanced_heap_walk(), "wrong heap walk type");
2250 return invoke_advanced_heap_root_callback(kind, obj);
2251 }
2252 }
// invoke the primitive array values callback (advanced walk only)
// Reports the elements of a primitive (type) array to the agent after
// applying the class and tag filters.  Returns false iff the agent aborted.
inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
  assert(obj->is_typeArray(), "not a primitive array");

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->array_primitive_value_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // invoke the callback
  int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(),
                                                  &wrapper,
                                                  obj,
                                                  (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}
// invoke the string value callback (advanced walk only)
// Reports the value of a java.lang.String to the agent after applying the
// class and tag filters.  Returns false iff the agent aborted.
inline bool CallbackInvoker::report_string_value(oop str) {
  assert(str->klass() == SystemDictionary::String_klass(), "not a string");

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->string_primitive_value_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(str, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), str);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // invoke the callback
  int res = invoke_string_value_callback(context->string_primitive_value_callback(),
                                         &wrapper,
                                         str,
                                         (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}
// invoke the primitive field callback (advanced walk only)
// Reports a primitive field value at 'addr' of the given JVM TI primitive
// 'type', located at field 'index' of obj.  Returns false iff the agent
// aborted.
inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind,
                                                    oop obj,
                                                    jint index,
                                                    address addr,
                                                    char type)
{
  // for primitive fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  AdvancedHeapWalkContext* context = advanced_context();
  assert(context->primitive_field_callback() != NULL, "no callback");

  // apply class filter
  if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
    return true;
  }

  CallbackWrapper wrapper(tag_map(), obj);

  // apply tag filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(),
                                 wrapper.klass_tag(),
                                 context->heap_filter())) {
    return true;
  }

  // the field index in the referrer
  reference_info.field.index = index;

  // map the type character to the JVM TI primitive type
  jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

  // setup the jvalue from the raw field bytes
  jvalue value;
  copy_to_jvalue(&value, addr, value_type);

  jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback();
  int res = (*cb)(ref_kind,
                  &reference_info,
                  wrapper.klass_tag(),
                  wrapper.obj_tag_p(),
                  value,
                  value_type,
                  (void*)user_data());
  return (!(res & JVMTI_VISIT_ABORT));
}
2362 // instance field
2363 inline bool CallbackInvoker::report_primitive_instance_field(oop obj,
2364 jint index,
2365 address value,
2366 char type) {
2367 return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD,
2368 obj,
2369 index,
2370 value,
2371 type);
2372 }
2374 // static field
2375 inline bool CallbackInvoker::report_primitive_static_field(oop obj,
2376 jint index,
2377 address value,
2378 char type) {
2379 return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
2380 obj,
2381 index,
2382 value,
2383 type);
2384 }
2386 // report a JNI local (root object) to the profiler
2387 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) {
2388 if (is_basic_heap_walk()) {
2389 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL,
2390 thread_tag,
2391 depth,
2392 m,
2393 -1,
2394 obj);
2395 } else {
2396 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL,
2397 thread_tag, tid,
2398 depth,
2399 m,
2400 (jlocation)-1,
2401 -1,
2402 obj);
2403 }
2404 }
2407 // report a local (stack reference, root object)
2408 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag,
2409 jlong tid,
2410 jint depth,
2411 jmethodID method,
2412 jlocation bci,
2413 jint slot,
2414 oop obj) {
2415 if (is_basic_heap_walk()) {
2416 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL,
2417 thread_tag,
2418 depth,
2419 method,
2420 slot,
2421 obj);
2422 } else {
2423 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL,
2424 thread_tag,
2425 tid,
2426 depth,
2427 method,
2428 bci,
2429 slot,
2430 obj);
2431 }
2432 }
2434 // report an object referencing a class.
2435 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) {
2436 if (is_basic_heap_walk()) {
2437 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2438 } else {
2439 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1);
2440 }
2441 }
2443 // report a class referencing its class loader.
2444 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) {
2445 if (is_basic_heap_walk()) {
2446 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2447 } else {
2448 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1);
2449 }
2450 }
2452 // report a class referencing its signers.
2453 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) {
2454 if (is_basic_heap_walk()) {
2455 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1);
2456 } else {
2457 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1);
2458 }
2459 }
2461 // report a class referencing its protection domain..
2462 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) {
2463 if (is_basic_heap_walk()) {
2464 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2465 } else {
2466 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1);
2467 }
2468 }
2470 // report a class referencing its superclass.
2471 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) {
2472 if (is_basic_heap_walk()) {
2473 // Send this to be consistent with past implementation
2474 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1);
2475 } else {
2476 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1);
2477 }
2478 }
2480 // report a class referencing one of its interfaces.
2481 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) {
2482 if (is_basic_heap_walk()) {
2483 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1);
2484 } else {
2485 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1);
2486 }
2487 }
2489 // report a class referencing one of its static fields.
2490 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) {
2491 if (is_basic_heap_walk()) {
2492 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2493 } else {
2494 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot);
2495 }
2496 }
2498 // report an array referencing an element object
2499 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) {
2500 if (is_basic_heap_walk()) {
2501 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2502 } else {
2503 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index);
2504 }
2505 }
2507 // report an object referencing an instance field object
2508 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) {
2509 if (is_basic_heap_walk()) {
2510 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot);
2511 } else {
2512 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot);
2513 }
2514 }
2516 // report an array referencing an element object
2517 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) {
2518 if (is_basic_heap_walk()) {
2519 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2520 } else {
2521 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index);
2522 }
2523 }
// A supporting closure used to process simple roots
class SimpleRootsClosure : public OopClosure {
 private:
  jvmtiHeapReferenceKind _kind;   // root kind being collected right now
  bool _continue;                 // cleared when the agent aborts the walk

  jvmtiHeapReferenceKind root_kind() { return _kind; }

 public:
  // set the root kind for the next batch of oops and re-arm the closure
  void set_kind(jvmtiHeapReferenceKind kind) {
    _kind = kind;
    _continue = true;
  }

  // true when a callback requested that iteration terminate
  inline bool stopped() {
    return !_continue;
  }

  void do_oop(oop* obj_p) {
    // iteration has terminated
    if (stopped()) {
      return;
    }

    // ignore null or deleted handles
    oop o = *obj_p;
    if (o == NULL || o == JNIHandles::deleted_handle()) {
      return;
    }

    assert(Universe::heap()->is_in_reserved(o), "should be impossible");

    jvmtiHeapReferenceKind kind = root_kind();
    if (kind == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) {
      // SystemDictionary::always_strong_oops_do reports the application
      // class loader as a root. We want this root to be reported as
      // a root kind of "OTHER" rather than "SYSTEM_CLASS".
      if (!o->is_instanceMirror()) {
        kind = JVMTI_HEAP_REFERENCE_OTHER;
      }
    }

    // some objects are ignored - in the case of simple
    // roots it's mostly Symbol*s that we are skipping
    // here.
    if (!ServiceUtil::visible_oop(o)) {
      return;
    }

    // invoke the callback; a false return stops further iteration
    _continue = CallbackInvoker::report_simple_root(kind, o);

  }
  // compressed oops are not expected from the root sets walked here
  virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
};
// A supporting closure used to process JNI locals
class JNILocalRootsClosure : public OopClosure {
 private:
  jlong _thread_tag;    // tag of the thread owning the JNI locals
  jlong _tid;           // thread id (advanced walk reference info)
  jint _depth;          // frame depth of the native frame
  jmethodID _method;    // native method owning the locals
  bool _continue;       // cleared when the agent aborts the walk
 public:
  // establish the thread/frame context for the next batch of JNI locals
  void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) {
    _thread_tag = thread_tag;
    _tid = tid;
    _depth = depth;
    _method = method;
    _continue = true;
  }

  // true when a callback requested that iteration terminate
  inline bool stopped() {
    return !_continue;
  }

  void do_oop(oop* obj_p) {
    // iteration has terminated
    if (stopped()) {
      return;
    }

    // ignore null or deleted handles
    oop o = *obj_p;
    if (o == NULL || o == JNIHandles::deleted_handle()) {
      return;
    }

    // skip objects that aren't visible to the profiler
    if (!ServiceUtil::visible_oop(o)) {
      return;
    }

    // invoke the callback; a false return stops further iteration
    _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o);
  }
  // compressed oops are not expected from JNI handle blocks
  virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); }
};
2625 // A VM operation to iterate over objects that are reachable from
2626 // a set of roots or an initial object.
2627 //
2628 // For VM_HeapWalkOperation the set of roots used is :-
2629 //
2630 // - All JNI global references
2631 // - All inflated monitors
2632 // - All classes loaded by the boot class loader (or all classes
2633 // in the event that class unloading is disabled)
2634 // - All java threads
2635 // - For each java thread then all locals and JNI local references
2636 // on the thread's execution stack
2637 // - All visible/explainable objects from Universes::oops_do
2638 //
class VM_HeapWalkOperation: public VM_Operation {
 private:
  enum {
    initial_visit_stack_size = 4000   // initial capacity of the visit stack
  };

  bool _is_advanced_heap_walk;                      // indicates FollowReferences
  JvmtiTagMap* _tag_map;                            // tag map of the requesting environment
  Handle _initial_object;                           // non-null when walking from a single object
  GrowableArray<oop>* _visit_stack;                 // the visit stack

  bool _collecting_heap_roots;                      // are we collecting roots
  bool _following_object_refs;                      // are we following object references

  bool _reporting_primitive_fields;                 // optional reporting
  bool _reporting_primitive_array_values;
  bool _reporting_string_values;

  // allocate the visit stack on the C heap (freed in the destructor)
  GrowableArray<oop>* create_visit_stack() {
    return new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(initial_visit_stack_size, true);
  }

  // accessors
  bool is_advanced_heap_walk() const               { return _is_advanced_heap_walk; }
  JvmtiTagMap* tag_map() const                     { return _tag_map; }
  Handle initial_object() const                    { return _initial_object; }

  bool is_following_references() const             { return _following_object_refs; }

  bool is_reporting_primitive_fields() const       { return _reporting_primitive_fields; }
  bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; }
  bool is_reporting_string_values() const          { return _reporting_string_values; }

  GrowableArray<oop>* visit_stack() const          { return _visit_stack; }

  // iterate over the various object types
  inline bool iterate_over_array(oop o);
  inline bool iterate_over_type_array(oop o);
  inline bool iterate_over_class(oop o);
  inline bool iterate_over_object(oop o);

  // root collection
  inline bool collect_simple_roots();
  inline bool collect_stack_roots();
  inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk);

  // visit an object
  inline bool visit(oop o);

 public:
  // basic style walk (IterateOverReachableObjects et al)
  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
                       Handle initial_object,
                       BasicHeapWalkContext callbacks,
                       const void* user_data);

  // advanced style walk (FollowReferences)
  VM_HeapWalkOperation(JvmtiTagMap* tag_map,
                       Handle initial_object,
                       AdvancedHeapWalkContext callbacks,
                       const void* user_data);

  ~VM_HeapWalkOperation();

  VMOp_Type type() const { return VMOp_HeapWalkOperation; }
  void doit();
};
// constructor for a basic style heap walk (IterateOverReachableObjects et al)
VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
                                           Handle initial_object,
                                           BasicHeapWalkContext callbacks,
                                           const void* user_data) {
  _is_advanced_heap_walk = false;
  _tag_map = tag_map;
  _initial_object = initial_object;
  // references are only followed when an object-ref callback is supplied
  _following_object_refs = (callbacks.object_ref_callback() != NULL);
  // basic walks never report primitive values
  _reporting_primitive_fields = false;
  _reporting_primitive_array_values = false;
  _reporting_string_values = false;
  _visit_stack = create_visit_stack();

  // route all reports through the basic style callbacks
  CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks);
}
2723 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
2724 Handle initial_object,
2725 AdvancedHeapWalkContext callbacks,
2726 const void* user_data) {
2727 _is_advanced_heap_walk = true;
2728 _tag_map = tag_map;
2729 _initial_object = initial_object;
2730 _following_object_refs = true;
2731 _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);;
2732 _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);;
2733 _reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);;
2734 _visit_stack = create_visit_stack();
2736 CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks);
2737 }
2739 VM_HeapWalkOperation::~VM_HeapWalkOperation() {
2740 if (_following_object_refs) {
2741 assert(_visit_stack != NULL, "checking");
2742 delete _visit_stack;
2743 _visit_stack = NULL;
2744 }
2745 }
// an array references its class and has a reference to
// each element in the array
// Returns false as soon as any callback aborts the walk.
inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
  objArrayOop array = objArrayOop(o);

  // array reference to its class
  oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror();
  if (!CallbackInvoker::report_class_reference(o, mirror)) {
    return false;
  }

  // iterate over the array and report each reference to a
  // non-null element
  for (int index=0; index<array->length(); index++) {
    oop elem = array->obj_at(index);
    if (elem == NULL) {
      continue;
    }

    // report the array reference o[index] = elem
    if (!CallbackInvoker::report_array_element_reference(o, elem, index)) {
      return false;
    }
  }
  return true;
}
// a type array references its class
// The primitive element values are additionally reported when the agent
// registered an array-primitive-value callback.
inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) {
  Klass* k = o->klass();
  oop mirror = k->java_mirror();
  if (!CallbackInvoker::report_class_reference(o, mirror)) {
    return false;
  }

  // report the array contents if required
  if (is_reporting_primitive_array_values()) {
    if (!CallbackInvoker::report_primitive_array_values(o)) {
      return false;
    }
  }
  return true;
}
2791 // verify that a static oop field is in range
2792 static inline bool verify_static_oop(InstanceKlass* ik,
2793 oop mirror, int offset) {
2794 address obj_p = (address)mirror + offset;
2795 address start = (address)InstanceMirrorKlass::start_of_static_fields(mirror);
2796 address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize);
2797 assert(end >= start, "sanity check");
2799 if (obj_p >= start && obj_p < end) {
2800 return true;
2801 } else {
2802 return false;
2803 }
2804 }
// a class references its super class, interfaces, class loader, ...
// and finally its static fields
// Returns false as soon as any callback aborts the walk; non-instance
// klasses (arrays, primitives) report nothing and return true.
inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
  int i;
  Klass* klass = java_lang_Class::as_Klass(java_class);

  if (klass->oop_is_instance()) {
    InstanceKlass* ik = InstanceKlass::cast(klass);

    // ignore the class if it hasn't been linked yet
    if (!ik->is_linked()) {
      return true;
    }

    // get the java mirror
    oop mirror = klass->java_mirror();

    // super (only if something more interesting than java.lang.Object)
    Klass* java_super = ik->java_super();
    if (java_super != NULL && java_super != SystemDictionary::Object_klass()) {
      oop super = java_super->java_mirror();
      if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
        return false;
      }
    }

    // class loader
    oop cl = ik->class_loader();
    if (cl != NULL) {
      if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
        return false;
      }
    }

    // protection domain
    oop pd = ik->protection_domain();
    if (pd != NULL) {
      if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
        return false;
      }
    }

    // signers
    oop signers = ik->signers();
    if (signers != NULL) {
      if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
        return false;
      }
    }

    // references from the constant pool (only resolved string and
    // class entries have heap objects to report)
    {
      ConstantPool* const pool = ik->constants();
      for (int i = 1; i < pool->length(); i++) {
        constantTag tag = pool->tag_at(i).value();
        if (tag.is_string() || tag.is_klass()) {
          oop entry;
          if (tag.is_string()) {
            entry = pool->resolved_string_at(i);
            // If the entry is non-null it is resolved.
            if (entry == NULL) continue;
          } else {
            entry = pool->resolved_klass_at(i)->java_mirror();
          }
          if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) {
            return false;
          }
        }
      }
    }

    // interfaces
    // (These will already have been reported as references from the constant pool
    //  but are specified by IterateOverReachableObjects and must be reported).
    Array<Klass*>* interfaces = ik->local_interfaces();
    for (i = 0; i < interfaces->length(); i++) {
      oop interf = ((Klass*)interfaces->at(i))->java_mirror();
      if (interf == NULL) {
        continue;
      }
      if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
        return false;
      }
    }

    // iterate over the static fields
    // (field_map is freed on every exit path below)
    ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);
    for (i=0; i<field_map->field_count(); i++) {
      ClassFieldDescriptor* field = field_map->field_at(i);
      char type = field->field_type();
      if (!is_primitive_field_type(type)) {
        oop fld_o = mirror->obj_field(field->field_offset());
        assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
        if (fld_o != NULL) {
          int slot = field->field_index();
          if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
            delete field_map;
            return false;
          }
        }
      } else {
        if (is_reporting_primitive_fields()) {
          address addr = (address)mirror + field->field_offset();
          int slot = field->field_index();
          if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) {
            delete field_map;
            return false;
          }
        }
      }
    }
    delete field_map;

    return true;
  }

  return true;
}
2926 // an object references a class and its instance fields
2927 // (static fields are ignored here as we report these as
2928 // references from the class).
2929 inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
2930 // reference to the class
2931 if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
2932 return false;
2933 }
2935 // iterate over instance fields
2936 ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
2937 for (int i=0; i<field_map->field_count(); i++) {
2938 ClassFieldDescriptor* field = field_map->field_at(i);
2939 char type = field->field_type();
2940 if (!is_primitive_field_type(type)) {
2941 oop fld_o = o->obj_field(field->field_offset());
2942 // ignore any objects that aren't visible to profiler
2943 if (fld_o != NULL && ServiceUtil::visible_oop(fld_o)) {
2944 assert(Universe::heap()->is_in_reserved(fld_o), "unsafe code should not "
2945 "have references to Klass* anymore");
2946 int slot = field->field_index();
2947 if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
2948 return false;
2949 }
2950 }
2951 } else {
2952 if (is_reporting_primitive_fields()) {
2953 // primitive instance field
2954 address addr = (address)o + field->field_offset();
2955 int slot = field->field_index();
2956 if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
2957 return false;
2958 }
2959 }
2960 }
2961 }
2963 // if the object is a java.lang.String
2964 if (is_reporting_string_values() &&
2965 o->klass() == SystemDictionary::String_klass()) {
2966 if (!CallbackInvoker::report_string_value(o)) {
2967 return false;
2968 }
2969 }
2970 return true;
2971 }
2974 // Collects all simple (non-stack) roots except for threads;
2975 // threads are handled in collect_stack_roots() as an optimization.
2976 // if there's a heap root callback provided then the callback is
2977 // invoked for each simple root.
2978 // if an object reference callback is provided then all simple
2979 // roots are pushed onto the marking stack so that they can be
2980 // processed later
2981 //
2982 inline bool VM_HeapWalkOperation::collect_simple_roots() {
2983 SimpleRootsClosure blk;
2985 // JNI globals
2986 blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
2987 JNIHandles::oops_do(&blk);
2988 if (blk.stopped()) {
2989 return false;
2990 }
2992 // Preloaded classes and loader from the system dictionary
2993 blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
2994 SystemDictionary::always_strong_oops_do(&blk);
2995 KlassToOopClosure klass_blk(&blk);
2996 ClassLoaderDataGraph::always_strong_oops_do(&blk, &klass_blk, false);
2997 if (blk.stopped()) {
2998 return false;
2999 }
3001 // Inflated monitors
3002 blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
3003 ObjectSynchronizer::oops_do(&blk);
3004 if (blk.stopped()) {
3005 return false;
3006 }
3008 // threads are now handled in collect_stack_roots()
3010 // Other kinds of roots maintained by HotSpot
3011 // Many of these won't be visible but others (such as instances of important
3012 // exceptions) will be visible.
3013 blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3014 Universe::oops_do(&blk);
3016 // If there are any non-perm roots in the code cache, visit them.
3017 blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
3018 CodeBlobToOopClosure look_in_blobs(&blk, false);
3019 CodeCache::scavenge_root_nmethods_do(&look_in_blobs);
3021 return true;
3022 }
3024 // Walk the stack of a given thread and find all references (locals
3025 // and JNI calls) and report these as stack references
3026 inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
3027 JNILocalRootsClosure* blk)
3028 {
3029 oop threadObj = java_thread->threadObj();
3030 assert(threadObj != NULL, "sanity check");
3032 // only need to get the thread's tag once per thread
3033 jlong thread_tag = tag_for(_tag_map, threadObj);
3035 // also need the thread id
3036 jlong tid = java_lang_Thread::thread_id(threadObj);
3039 if (java_thread->has_last_Java_frame()) {
3041 // vframes are resource allocated
3042 Thread* current_thread = Thread::current();
3043 ResourceMark rm(current_thread);
3044 HandleMark hm(current_thread);
3046 RegisterMap reg_map(java_thread);
3047 frame f = java_thread->last_frame();
3048 vframe* vf = vframe::new_vframe(&f, ®_map, java_thread);
3050 bool is_top_frame = true;
3051 int depth = 0;
3052 frame* last_entry_frame = NULL;
3054 while (vf != NULL) {
3055 if (vf->is_java_frame()) {
3057 // java frame (interpreted, compiled, ...)
3058 javaVFrame *jvf = javaVFrame::cast(vf);
3060 // the jmethodID
3061 jmethodID method = jvf->method()->jmethod_id();
3063 if (!(jvf->method()->is_native())) {
3064 jlocation bci = (jlocation)jvf->bci();
3065 StackValueCollection* locals = jvf->locals();
3066 for (int slot=0; slot<locals->size(); slot++) {
3067 if (locals->at(slot)->type() == T_OBJECT) {
3068 oop o = locals->obj_at(slot)();
3069 if (o == NULL) {
3070 continue;
3071 }
3073 // stack reference
3074 if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
3075 bci, slot, o)) {
3076 return false;
3077 }
3078 }
3079 }
3080 } else {
3081 blk->set_context(thread_tag, tid, depth, method);
3082 if (is_top_frame) {
3083 // JNI locals for the top frame.
3084 java_thread->active_handles()->oops_do(blk);
3085 } else {
3086 if (last_entry_frame != NULL) {
3087 // JNI locals for the entry frame
3088 assert(last_entry_frame->is_entry_frame(), "checking");
3089 last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
3090 }
3091 }
3092 }
3093 last_entry_frame = NULL;
3094 depth++;
3095 } else {
3096 // externalVFrame - for an entry frame then we report the JNI locals
3097 // when we find the corresponding javaVFrame
3098 frame* fr = vf->frame_pointer();
3099 assert(fr != NULL, "sanity check");
3100 if (fr->is_entry_frame()) {
3101 last_entry_frame = fr;
3102 }
3103 }
3105 vf = vf->sender();
3106 is_top_frame = false;
3107 }
3108 } else {
3109 // no last java frame but there may be JNI locals
3110 blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
3111 java_thread->active_handles()->oops_do(blk);
3112 }
3113 return true;
3114 }
3117 // Collects the simple roots for all threads and collects all
3118 // stack roots - for each thread it walks the execution
3119 // stack to find all references and local JNI refs.
3120 inline bool VM_HeapWalkOperation::collect_stack_roots() {
3121 JNILocalRootsClosure blk;
3122 for (JavaThread* thread = Threads::first(); thread != NULL ; thread = thread->next()) {
3123 oop threadObj = thread->threadObj();
3124 if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
3125 // Collect the simple root for this thread before we
3126 // collect its stack roots
3127 if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
3128 threadObj)) {
3129 return false;
3130 }
3131 if (!collect_stack_roots(thread, &blk)) {
3132 return false;
3133 }
3134 }
3135 }
3136 return true;
3137 }
3139 // visit an object
3140 // first mark the object as visited
3141 // second get all the outbound references from this object (in other words, all
3142 // the objects referenced by this object).
3143 //
3144 bool VM_HeapWalkOperation::visit(oop o) {
3145 // mark object as visited
3146 assert(!ObjectMarker::visited(o), "can't visit same object more than once");
3147 ObjectMarker::mark(o);
3149 // instance
3150 if (o->is_instance()) {
3151 if (o->klass() == SystemDictionary::Class_klass()) {
3152 if (!java_lang_Class::is_primitive(o)) {
3153 // a java.lang.Class
3154 return iterate_over_class(o);
3155 }
3156 } else {
3157 return iterate_over_object(o);
3158 }
3159 }
3161 // object array
3162 if (o->is_objArray()) {
3163 return iterate_over_array(o);
3164 }
3166 // type array
3167 if (o->is_typeArray()) {
3168 return iterate_over_type_array(o);
3169 }
3171 return true;
3172 }
// Perform the heap walk: seed the visit stack (from the heap roots, or from
// a single initial object), then transitively visit objects until the stack
// is drained or a callback terminates the iteration.
void VM_HeapWalkOperation::doit() {
  // RAII: resource area for vframes/field maps, object-mark state, and the
  // class field map cache - all released when this scope exits
  ResourceMark rm;
  ObjectMarkerController marker;
  ClassFieldMapCacheMark cm;

  assert(visit_stack()->is_empty(), "visit stack must be empty");

  // the heap walk starts with an initial object or the heap roots
  if (initial_object().is_null()) {
    // If either collect_stack_roots() or collect_simple_roots()
    // returns false at this point, then there are no mark bits
    // to reset.
    ObjectMarker::set_needs_reset(false);

    // Calling collect_stack_roots() before collect_simple_roots()
    // can result in a big performance boost for an agent that is
    // focused on analyzing references in the thread stacks.
    if (!collect_stack_roots()) return;

    if (!collect_simple_roots()) return;

    // no early return so enable heap traversal to reset the mark bits
    ObjectMarker::set_needs_reset(true);
  } else {
    visit_stack()->push(initial_object()());
  }

  // object references required
  if (is_following_references()) {

    // visit each object until all reachable objects have been
    // visited or the callback asked to terminate the iteration.
    while (!visit_stack()->is_empty()) {
      oop o = visit_stack()->pop();
      if (!ObjectMarker::visited(o)) {
        // visit() marks the object and pushes its references on the stack
        if (!visit(o)) {
          break;
        }
      }
    }
  }
}
3217 // iterate over all objects that are reachable from a set of roots
3218 void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
3219 jvmtiStackReferenceCallback stack_ref_callback,
3220 jvmtiObjectReferenceCallback object_ref_callback,
3221 const void* user_data) {
3222 MutexLocker ml(Heap_lock);
3223 BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
3224 VM_HeapWalkOperation op(this, Handle(), context, user_data);
3225 VMThread::execute(&op);
3226 }
3228 // iterate over all objects that are reachable from a given object
3229 void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
3230 jvmtiObjectReferenceCallback object_ref_callback,
3231 const void* user_data) {
3232 oop obj = JNIHandles::resolve(object);
3233 Handle initial_object(Thread::current(), obj);
3235 MutexLocker ml(Heap_lock);
3236 BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
3237 VM_HeapWalkOperation op(this, initial_object, context, user_data);
3238 VMThread::execute(&op);
3239 }
3241 // follow references from an initial object or the GC roots
3242 void JvmtiTagMap::follow_references(jint heap_filter,
3243 KlassHandle klass,
3244 jobject object,
3245 const jvmtiHeapCallbacks* callbacks,
3246 const void* user_data)
3247 {
3248 oop obj = JNIHandles::resolve(object);
3249 Handle initial_object(Thread::current(), obj);
3251 MutexLocker ml(Heap_lock);
3252 AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
3253 VM_HeapWalkOperation op(this, initial_object, context, user_data);
3254 VMThread::execute(&op);
3255 }
3258 void JvmtiTagMap::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
3259 // No locks during VM bring-up (0 threads) and no safepoints after main
3260 // thread creation and before VMThread creation (1 thread); initial GC
3261 // verification can happen in that window which gets to here.
3262 assert(Threads::number_of_threads() <= 1 ||
3263 SafepointSynchronize::is_at_safepoint(),
3264 "must be executed at a safepoint");
3265 if (JvmtiEnv::environments_might_exist()) {
3266 JvmtiEnvIterator it;
3267 for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
3268 JvmtiTagMap* tag_map = env->tag_map();
3269 if (tag_map != NULL && !tag_map->is_empty()) {
3270 tag_map->do_weak_oops(is_alive, f);
3271 }
3272 }
3273 }
3274 }
3276 void JvmtiTagMap::do_weak_oops(BoolObjectClosure* is_alive, OopClosure* f) {
3278 // does this environment have the OBJECT_FREE event enabled
3279 bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);
3281 // counters used for trace message
3282 int freed = 0;
3283 int moved = 0;
3285 JvmtiTagHashmap* hashmap = this->hashmap();
3287 // reenable sizing (if disabled)
3288 hashmap->set_resizing_enabled(true);
3290 // if the hashmap is empty then we can skip it
3291 if (hashmap->_entry_count == 0) {
3292 return;
3293 }
3295 // now iterate through each entry in the table
3297 JvmtiTagHashmapEntry** table = hashmap->table();
3298 int size = hashmap->size();
3300 JvmtiTagHashmapEntry* delayed_add = NULL;
3302 for (int pos = 0; pos < size; ++pos) {
3303 JvmtiTagHashmapEntry* entry = table[pos];
3304 JvmtiTagHashmapEntry* prev = NULL;
3306 while (entry != NULL) {
3307 JvmtiTagHashmapEntry* next = entry->next();
3309 oop* obj = entry->object_addr();
3311 // has object been GC'ed
3312 if (!is_alive->do_object_b(entry->object())) {
3313 // grab the tag
3314 jlong tag = entry->tag();
3315 guarantee(tag != 0, "checking");
3317 // remove GC'ed entry from hashmap and return the
3318 // entry to the free list
3319 hashmap->remove(prev, pos, entry);
3320 destroy_entry(entry);
3322 // post the event to the profiler
3323 if (post_object_free) {
3324 JvmtiExport::post_object_free(env(), tag);
3325 }
3327 ++freed;
3328 } else {
3329 f->do_oop(entry->object_addr());
3330 oop new_oop = entry->object();
3332 // if the object has moved then re-hash it and move its
3333 // entry to its new location.
3334 unsigned int new_pos = JvmtiTagHashmap::hash(new_oop, size);
3335 if (new_pos != (unsigned int)pos) {
3336 if (prev == NULL) {
3337 table[pos] = next;
3338 } else {
3339 prev->set_next(next);
3340 }
3341 if (new_pos < (unsigned int)pos) {
3342 entry->set_next(table[new_pos]);
3343 table[new_pos] = entry;
3344 } else {
3345 // Delay adding this entry to it's new position as we'd end up
3346 // hitting it again during this iteration.
3347 entry->set_next(delayed_add);
3348 delayed_add = entry;
3349 }
3350 moved++;
3351 } else {
3352 // object didn't move
3353 prev = entry;
3354 }
3355 }
3357 entry = next;
3358 }
3359 }
3361 // Re-add all the entries which were kept aside
3362 while (delayed_add != NULL) {
3363 JvmtiTagHashmapEntry* next = delayed_add->next();
3364 unsigned int pos = JvmtiTagHashmap::hash(delayed_add->object(), size);
3365 delayed_add->set_next(table[pos]);
3366 table[pos] = delayed_add;
3367 delayed_add = next;
3368 }
3370 // stats
3371 if (TraceJVMTIObjectTagging) {
3372 int post_total = hashmap->_entry_count;
3373 int pre_total = post_total + freed;
3375 tty->print_cr("(%d->%d, %d freed, %d total moves)",
3376 pre_total, post_total, freed, moved);
3377 }
3378 }