src/share/vm/memory/heapInspection.cpp

author:      zgu
date:        Thu, 28 Jun 2012 17:03:16 -0400
changeset:   3900:d2a62e0f25eb
parent:      2708:1d1603768966
child:       4037:da91efe96a93
permissions: -rw-r--r--

6995781: Native Memory Tracking (Phase 1)
7151532: DCmd for hotspot native memory tracking
Summary: Implementation of native memory tracking phase 1, which tracks VM native memory usage, and related DCmd
Reviewed-by: acorn, coleenp, fparain
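
The two bug IDs above are related halves of the feature: 6995781 adds the tracker itself, and 7151532 adds its diagnostic command, which later JDK releases expose through jcmd as VM.native_memory. Within this file, the lines attributed to zgu@3900 show the client-side pattern of phase 1: each C-heap allocation site names a memory category (mtInternal here) in NEW_C_HEAP_ARRAY, FREE_C_HEAP_ARRAY and the ResourceObj::C_HEAP placement new, so the tracker can attribute native usage to a VM subsystem. The following minimal standalone sketch illustrates only that tagging idea; MemCategory, tagged_alloc and tagged_free are hypothetical names invented for this illustration and are not HotSpot APIs.

// Minimal sketch of category-tagged C-heap allocation (hypothetical names,
// not the HotSpot implementation): each allocation and free is charged to a
// category so per-subsystem native usage can be reported.
#include <cstdio>
#include <cstdlib>

enum MemCategory { CatInternal, CatThread, CatCount };

static size_t g_usage[CatCount] = {0};   // bytes currently attributed per category

// Allocate from the C heap and attribute the size to the given category.
void* tagged_alloc(size_t size, MemCategory cat) {
  void* p = std::malloc(size);
  if (p != NULL) {
    g_usage[cat] += size;
  }
  return p;
}

// Release the block and subtract its size from the category's running total.
void tagged_free(void* p, size_t size, MemCategory cat) {
  if (p != NULL) {
    g_usage[cat] -= size;
    std::free(p);
  }
}

int main() {
  // Mirrors the KlassInfoTable constructor/destructor pair below: the bucket
  // array is charged to the "internal" category for its whole lifetime.
  const size_t sz = 64 * sizeof(long);
  void* buckets = tagged_alloc(sz, CatInternal);
  std::printf("internal: %zu bytes\n", g_usage[CatInternal]);
  tagged_free(buckets, sz, CatInternal);
  std::printf("internal: %zu bytes\n", g_usage[CatInternal]);
  return 0;
}

In this simplified model the caller supplies the size and category at the free as well, mirroring the way FREE_C_HEAP_ARRAY in the listing below names mtInternal again at the release site.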

duke@435 1 /*
trims@2708 2 * Copyright (c) 2002, 2011, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
stefank@2314 26 #include "gc_interface/collectedHeap.hpp"
stefank@2314 27 #include "memory/genCollectedHeap.hpp"
stefank@2314 28 #include "memory/heapInspection.hpp"
stefank@2314 29 #include "memory/resourceArea.hpp"
stefank@2314 30 #include "oops/klassOop.hpp"
stefank@2314 31 #include "runtime/os.hpp"
stefank@2314 32 #include "utilities/globalDefinitions.hpp"
stefank@2314 33 #ifndef SERIALGC
stefank@2314 34 #include "gc_implementation/parallelScavenge/parallelScavengeHeap.hpp"
stefank@2314 35 #endif
duke@435 36
duke@435 37 // HeapInspection
duke@435 38
duke@435 39 int KlassInfoEntry::compare(KlassInfoEntry* e1, KlassInfoEntry* e2) {
duke@435 40 if (e1->_instance_words > e2->_instance_words) {
duke@435 41 return -1;
duke@435 42 } else if (e1->_instance_words < e2->_instance_words) {
duke@435 43 return 1;
duke@435 44 }
duke@435 45 return 0;
duke@435 46 }
duke@435 47
duke@435 48 void KlassInfoEntry::print_on(outputStream* st) const {
duke@435 49 ResourceMark rm;
duke@435 50 const char* name;
duke@435 51 if (_klass->klass_part()->name() != NULL) {
duke@435 52 name = _klass->klass_part()->external_name();
duke@435 53 } else {
duke@435 54 if (_klass == Universe::klassKlassObj()) name = "<klassKlass>"; else
duke@435 55 if (_klass == Universe::arrayKlassKlassObj()) name = "<arrayKlassKlass>"; else
duke@435 56 if (_klass == Universe::objArrayKlassKlassObj()) name = "<objArrayKlassKlass>"; else
duke@435 57 if (_klass == Universe::instanceKlassKlassObj()) name = "<instanceKlassKlass>"; else
duke@435 58 if (_klass == Universe::typeArrayKlassKlassObj()) name = "<typeArrayKlassKlass>"; else
duke@435 59 if (_klass == Universe::boolArrayKlassObj()) name = "<boolArrayKlass>"; else
duke@435 60 if (_klass == Universe::charArrayKlassObj()) name = "<charArrayKlass>"; else
duke@435 61 if (_klass == Universe::singleArrayKlassObj()) name = "<singleArrayKlass>"; else
duke@435 62 if (_klass == Universe::doubleArrayKlassObj()) name = "<doubleArrayKlass>"; else
duke@435 63 if (_klass == Universe::byteArrayKlassObj()) name = "<byteArrayKlass>"; else
duke@435 64 if (_klass == Universe::shortArrayKlassObj()) name = "<shortArrayKlass>"; else
duke@435 65 if (_klass == Universe::intArrayKlassObj()) name = "<intArrayKlass>"; else
duke@435 66 if (_klass == Universe::longArrayKlassObj()) name = "<longArrayKlass>"; else
duke@435 67 if (_klass == Universe::methodKlassObj()) name = "<methodKlass>"; else
duke@435 68 if (_klass == Universe::constMethodKlassObj()) name = "<constMethodKlass>"; else
duke@435 69 if (_klass == Universe::methodDataKlassObj()) name = "<methodDataKlass>"; else
duke@435 70 if (_klass == Universe::constantPoolKlassObj()) name = "<constantPoolKlass>"; else
duke@435 71 if (_klass == Universe::constantPoolCacheKlassObj()) name = "<constantPoolCacheKlass>"; else
duke@435 72 if (_klass == Universe::compiledICHolderKlassObj()) name = "<compiledICHolderKlass>"; else
duke@435 73 name = "<no name>";
duke@435 74 }
duke@435 75 // simplify the formatting (ILP32 vs LP64) - always cast the numbers to 64-bit
ysr@446 76 st->print_cr(INT64_FORMAT_W(13) " " UINT64_FORMAT_W(13) " %s",
duke@435 77 (jlong) _instance_count,
duke@435 78 (julong) _instance_words * HeapWordSize,
duke@435 79 name);
duke@435 80 }
duke@435 81
duke@435 82 KlassInfoEntry* KlassInfoBucket::lookup(const klassOop k) {
duke@435 83 KlassInfoEntry* elt = _list;
duke@435 84 while (elt != NULL) {
duke@435 85 if (elt->is_equal(k)) {
duke@435 86 return elt;
duke@435 87 }
duke@435 88 elt = elt->next();
duke@435 89 }
duke@435 90 elt = new KlassInfoEntry(k, list());
ysr@446 91 // We may be out of space to allocate the new entry.
ysr@446 92 if (elt != NULL) {
ysr@446 93 set_list(elt);
ysr@446 94 }
duke@435 95 return elt;
duke@435 96 }
duke@435 97
duke@435 98 void KlassInfoBucket::iterate(KlassInfoClosure* cic) {
duke@435 99 KlassInfoEntry* elt = _list;
duke@435 100 while (elt != NULL) {
duke@435 101 cic->do_cinfo(elt);
duke@435 102 elt = elt->next();
duke@435 103 }
duke@435 104 }
duke@435 105
duke@435 106 void KlassInfoBucket::empty() {
duke@435 107 KlassInfoEntry* elt = _list;
duke@435 108 _list = NULL;
duke@435 109 while (elt != NULL) {
duke@435 110 KlassInfoEntry* next = elt->next();
duke@435 111 delete elt;
duke@435 112 elt = next;
duke@435 113 }
duke@435 114 }
duke@435 115
duke@435 116 KlassInfoTable::KlassInfoTable(int size, HeapWord* ref) {
ysr@446 117 _size = 0;
duke@435 118 _ref = ref;
zgu@3900 119 _buckets = NEW_C_HEAP_ARRAY(KlassInfoBucket, size, mtInternal);
ysr@446 120 if (_buckets != NULL) {
ysr@446 121 _size = size;
ysr@446 122 for (int index = 0; index < _size; index++) {
ysr@446 123 _buckets[index].initialize();
ysr@446 124 }
duke@435 125 }
duke@435 126 }
duke@435 127
duke@435 128 KlassInfoTable::~KlassInfoTable() {
ysr@446 129 if (_buckets != NULL) {
ysr@446 130 for (int index = 0; index < _size; index++) {
ysr@446 131 _buckets[index].empty();
ysr@446 132 }
zgu@3900 133 FREE_C_HEAP_ARRAY(KlassInfoBucket, _buckets, mtInternal);
ysr@446 134 _size = 0;
duke@435 135 }
duke@435 136 }
duke@435 137
duke@435 138 uint KlassInfoTable::hash(klassOop p) {
duke@435 139 assert(Universe::heap()->is_in_permanent((HeapWord*)p), "all klasses in permgen");
duke@435 140 return (uint)(((uintptr_t)p - (uintptr_t)_ref) >> 2);
duke@435 141 }
duke@435 142
duke@435 143 KlassInfoEntry* KlassInfoTable::lookup(const klassOop k) {
duke@435 144 uint idx = hash(k) % _size;
ysr@446 145 assert(_buckets != NULL, "Allocation failure should have been caught");
duke@435 146 KlassInfoEntry* e = _buckets[idx].lookup(k);
ysr@446 147 // Lookup may fail if this is a new klass for which we
ysr@446 148 // could not allocate space for a new entry.
ysr@446 149 assert(e == NULL || k == e->klass(), "must be equal");
duke@435 150 return e;
duke@435 151 }
duke@435 152
ysr@446 153 // Return false if the entry could not be recorded on account
ysr@446 154 // of running out of space required to create a new entry.
ysr@446 155 bool KlassInfoTable::record_instance(const oop obj) {
duke@435 156 klassOop k = obj->klass();
duke@435 157 KlassInfoEntry* elt = lookup(k);
ysr@446 158 // elt may be NULL if it's a new klass for which we
ysr@446 159 // could not allocate space for a new entry in the hashtable.
ysr@446 160 if (elt != NULL) {
ysr@446 161 elt->set_count(elt->count() + 1);
ysr@446 162 elt->set_words(elt->words() + obj->size());
ysr@446 163 return true;
ysr@446 164 } else {
ysr@446 165 return false;
ysr@446 166 }
duke@435 167 }
duke@435 168
duke@435 169 void KlassInfoTable::iterate(KlassInfoClosure* cic) {
ysr@446 170 assert(_size == 0 || _buckets != NULL, "Allocation failure should have been caught");
duke@435 171 for (int index = 0; index < _size; index++) {
duke@435 172 _buckets[index].iterate(cic);
duke@435 173 }
duke@435 174 }
duke@435 175
duke@435 176 int KlassInfoHisto::sort_helper(KlassInfoEntry** e1, KlassInfoEntry** e2) {
duke@435 177 return (*e1)->compare(*e1,*e2);
duke@435 178 }
duke@435 179
duke@435 180 KlassInfoHisto::KlassInfoHisto(const char* title, int estimatedCount) :
duke@435 181 _title(title) {
zgu@3900 182 _elements = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<KlassInfoEntry*>(estimatedCount,true);
duke@435 183 }
duke@435 184
duke@435 185 KlassInfoHisto::~KlassInfoHisto() {
duke@435 186 delete _elements;
duke@435 187 }
duke@435 188
duke@435 189 void KlassInfoHisto::add(KlassInfoEntry* cie) {
duke@435 190 elements()->append(cie);
duke@435 191 }
duke@435 192
duke@435 193 void KlassInfoHisto::sort() {
duke@435 194 elements()->sort(KlassInfoHisto::sort_helper);
duke@435 195 }
duke@435 196
duke@435 197 void KlassInfoHisto::print_elements(outputStream* st) const {
duke@435 198 // simplify the formatting (ILP32 vs LP64) - store the sum in 64-bit
duke@435 199 jlong total = 0;
duke@435 200 julong totalw = 0;
duke@435 201 for(int i=0; i < elements()->length(); i++) {
duke@435 202 st->print("%4d: ", i+1);
duke@435 203 elements()->at(i)->print_on(st);
duke@435 204 total += elements()->at(i)->count();
duke@435 205 totalw += elements()->at(i)->words();
duke@435 206 }
ysr@446 207 st->print_cr("Total " INT64_FORMAT_W(13) " " UINT64_FORMAT_W(13),
duke@435 208 total, totalw * HeapWordSize);
duke@435 209 }
duke@435 210
duke@435 211 void KlassInfoHisto::print_on(outputStream* st) const {
duke@435 212 st->print_cr("%s",title());
duke@435 213 print_elements(st);
duke@435 214 }
duke@435 215
duke@435 216 class HistoClosure : public KlassInfoClosure {
duke@435 217 private:
duke@435 218 KlassInfoHisto* _cih;
duke@435 219 public:
duke@435 220 HistoClosure(KlassInfoHisto* cih) : _cih(cih) {}
duke@435 221
duke@435 222 void do_cinfo(KlassInfoEntry* cie) {
duke@435 223 _cih->add(cie);
duke@435 224 }
duke@435 225 };
duke@435 226
duke@435 227 class RecordInstanceClosure : public ObjectClosure {
duke@435 228 private:
duke@435 229 KlassInfoTable* _cit;
ysr@446 230 size_t _missed_count;
duke@435 231 public:
ysr@446 232 RecordInstanceClosure(KlassInfoTable* cit) :
ysr@446 233 _cit(cit), _missed_count(0) {}
duke@435 234
duke@435 235 void do_object(oop obj) {
ysr@446 236 if (!_cit->record_instance(obj)) {
ysr@446 237 _missed_count++;
ysr@446 238 }
duke@435 239 }
ysr@446 240
ysr@446 241 size_t missed_count() { return _missed_count; }
duke@435 242 };
duke@435 243
ysr@1050 244 void HeapInspection::heap_inspection(outputStream* st, bool need_prologue) {
duke@435 245 ResourceMark rm;
duke@435 246 HeapWord* ref;
duke@435 247
duke@435 248 CollectedHeap* heap = Universe::heap();
ysr@777 249 bool is_shared_heap = false;
duke@435 250 switch (heap->kind()) {
ysr@777 251 case CollectedHeap::G1CollectedHeap:
duke@435 252 case CollectedHeap::GenCollectedHeap: {
ysr@777 253 is_shared_heap = true;
ysr@777 254 SharedHeap* sh = (SharedHeap*)heap;
ysr@1050 255 if (need_prologue) {
ysr@1050 256 sh->gc_prologue(false /* !full */); // get any necessary locks, etc.
ysr@1050 257 }
ysr@777 258 ref = sh->perm_gen()->used_region().start();
duke@435 259 break;
duke@435 260 }
duke@435 261 #ifndef SERIALGC
duke@435 262 case CollectedHeap::ParallelScavengeHeap: {
duke@435 263 ParallelScavengeHeap* psh = (ParallelScavengeHeap*)heap;
duke@435 264 ref = psh->perm_gen()->object_space()->used_region().start();
duke@435 265 break;
duke@435 266 }
duke@435 267 #endif // SERIALGC
duke@435 268 default:
duke@435 269 ShouldNotReachHere(); // Unexpected heap kind for this op
duke@435 270 }
duke@435 271 // Collect klass instance info
ysr@446 272 KlassInfoTable cit(KlassInfoTable::cit_size, ref);
ysr@446 273 if (!cit.allocation_failed()) {
ysr@446 274 // Iterate over objects in the heap
ysr@446 275 RecordInstanceClosure ric(&cit);
jmasa@952 276 // If this operation encounters a bad object when using CMS,
jmasa@952 277 // consider using safe_object_iterate() which avoids perm gen
jmasa@952 278 // objects that may contain bad references.
ysr@446 279 Universe::heap()->object_iterate(&ric);
duke@435 280
ysr@446 281 // Report if certain classes are not counted because of
ysr@446 282 // running out of C-heap for the histogram.
ysr@446 283 size_t missed_count = ric.missed_count();
ysr@446 284 if (missed_count != 0) {
ysr@446 285 st->print_cr("WARNING: Ran out of C-heap; undercounted " SIZE_FORMAT
ysr@446 286 " total instances in data below",
ysr@446 287 missed_count);
ysr@446 288 }
ysr@446 289 // Sort and print klass instance info
ysr@446 290 KlassInfoHisto histo("\n"
ysr@446 291 " num #instances #bytes class name\n"
ysr@446 292 "----------------------------------------------",
ysr@446 293 KlassInfoHisto::histo_initial_size);
ysr@446 294 HistoClosure hc(&histo);
ysr@446 295 cit.iterate(&hc);
ysr@446 296 histo.sort();
ysr@446 297 histo.print_on(st);
ysr@446 298 } else {
ysr@446 299 st->print_cr("WARNING: Ran out of C-heap; histogram not generated");
ysr@446 300 }
duke@435 301 st->flush();
duke@435 302
ysr@1050 303 if (need_prologue && is_shared_heap) {
ysr@777 304 SharedHeap* sh = (SharedHeap*)heap;
ysr@777 305 sh->gc_epilogue(false /* !full */); // release all acquired locks, etc.
duke@435 306 }
duke@435 307 }
duke@435 308
duke@435 309 class FindInstanceClosure : public ObjectClosure {
duke@435 310 private:
duke@435 311 klassOop _klass;
duke@435 312 GrowableArray<oop>* _result;
duke@435 313
duke@435 314 public:
duke@435 315 FindInstanceClosure(klassOop k, GrowableArray<oop>* result) : _klass(k), _result(result) {};
duke@435 316
duke@435 317 void do_object(oop obj) {
duke@435 318 if (obj->is_a(_klass)) {
duke@435 319 _result->append(obj);
duke@435 320 }
duke@435 321 }
duke@435 322 };
duke@435 323
duke@435 324 void HeapInspection::find_instances_at_safepoint(klassOop k, GrowableArray<oop>* result) {
duke@435 325 assert(SafepointSynchronize::is_at_safepoint(), "all threads are stopped");
jcoomes@1844 326 assert(Heap_lock->is_locked(), "should have the Heap_lock");
duke@435 327
duke@435 328 // Ensure that the heap is parsable
duke@435 329 Universe::heap()->ensure_parsability(false); // no need to retire TLABs
duke@435 330
duke@435 331 // Iterate over objects in the heap
duke@435 332 FindInstanceClosure fic(k, result);
jmasa@952 333 // If this operation encounters a bad object when using CMS,
jmasa@952 334 // consider using safe_object_iterate() which avoids perm gen
jmasa@952 335 // objects that may contain bad references.
duke@435 336 Universe::heap()->object_iterate(&fic);
duke@435 337 }
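
Reading the listing as a whole: heap_inspection() builds a KlassInfoTable keyed by klassOop, walks the heap once with RecordInstanceClosure through object_iterate(), copies the per-class entries into a KlassInfoHisto, sorts them by descending instance footprint (KlassInfoEntry::compare) and prints the "num #instances #bytes class name" table. The standalone C++ sketch below reproduces that pipeline with ordinary library containers as a rough analogue; FakeObject, ClassStat and print_histogram are hypothetical names used only for this illustration, not HotSpot code.

// Rough standalone analogue of the histogram pipeline in this file
// (hypothetical types, not HotSpot code): visit every object, accumulate
// per-class counts and sizes, sort by descending footprint, then print.
#include <algorithm>
#include <cstdio>
#include <map>
#include <string>
#include <vector>

struct FakeObject {            // stands in for an oop
  std::string klass_name;      // stands in for obj->klass()
  size_t      size_in_bytes;   // stands in for obj->size() * HeapWordSize
};

struct ClassStat {             // analogue of KlassInfoEntry
  std::string name;
  size_t      count = 0;
  size_t      bytes = 0;
};

void print_histogram(const std::vector<FakeObject>& heap) {
  // Analogue of KlassInfoTable + RecordInstanceClosure: one pass over the
  // heap, accumulating a count and byte total per class.
  std::map<std::string, ClassStat> table;
  for (const FakeObject& obj : heap) {
    ClassStat& e = table[obj.klass_name];
    e.name   = obj.klass_name;
    e.count += 1;                       // elt->set_count(elt->count() + 1)
    e.bytes += obj.size_in_bytes;       // elt->set_words(elt->words() + obj->size())
  }

  // Analogue of KlassInfoHisto: sort by descending footprint, then print.
  std::vector<ClassStat> histo;
  for (const auto& kv : table) histo.push_back(kv.second);
  std::sort(histo.begin(), histo.end(),
            [](const ClassStat& a, const ClassStat& b) { return a.bytes > b.bytes; });

  std::printf(" num     #instances         #bytes  class name\n");
  size_t total_count = 0, total_bytes = 0;
  for (size_t i = 0; i < histo.size(); i++) {
    std::printf("%4zu: %13zu %14zu  %s\n",
                i + 1, histo[i].count, histo[i].bytes, histo[i].name.c_str());
    total_count += histo[i].count;
    total_bytes += histo[i].bytes;
  }
  std::printf("Total %13zu %14zu\n", total_count, total_bytes);
}

int main() {
  std::vector<FakeObject> heap = {
    {"java.lang.String", 24}, {"java.lang.String", 24}, {"int[]", 4096}
  };
  print_histogram(heap);
  return 0;
}

The real code additionally has to survive C-heap exhaustion while the VM is stopped, which is why KlassInfoTable::allocation_failed() and RecordInstanceClosure::missed_count() are threaded through the same pipeline above instead of assuming every entry can be recorded.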
