src/share/vm/services/memBaseline.cpp

Wed, 10 Oct 2012 14:35:58 -0400

author
jprovino
date
Wed, 10 Oct 2012 14:35:58 -0400
changeset 4165
fb19af007ffc
parent 3900
d2a62e0f25eb
child 4193
716c64bda5ba
permissions
-rw-r--r--

7189254: Change makefiles for more flexibility to override defaults
Summary: Change makefiles so that targets and parameters can be overridden by alternate makefiles.
Reviewed-by: dholmes, coleenp

zgu@3900 1 /*
zgu@3900 2 * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
zgu@3900 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
zgu@3900 4 *
zgu@3900 5 * This code is free software; you can redistribute it and/or modify it
zgu@3900 6 * under the terms of the GNU General Public License version 2 only, as
zgu@3900 7 * published by the Free Software Foundation.
zgu@3900 8 *
zgu@3900 9 * This code is distributed in the hope that it will be useful, but WITHOUT
zgu@3900 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
zgu@3900 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
zgu@3900 12 * version 2 for more details (a copy is included in the LICENSE file that
zgu@3900 13 * accompanied this code).
zgu@3900 14 *
zgu@3900 15 * You should have received a copy of the GNU General Public License version
zgu@3900 16 * 2 along with this work; if not, write to the Free Software Foundation,
zgu@3900 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
zgu@3900 18 *
zgu@3900 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
zgu@3900 20 * or visit www.oracle.com if you need additional information or have any
zgu@3900 21 * questions.
zgu@3900 22 *
zgu@3900 23 */
zgu@3900 24 #include "precompiled.hpp"
zgu@3900 25 #include "classfile/systemDictionary.hpp"
zgu@3900 26 #include "memory/allocation.hpp"
zgu@3900 27 #include "services/memBaseline.hpp"
zgu@3900 28 #include "services/memTracker.hpp"
zgu@3900 29
// Table mapping each NMT memory type flag to its human-readable display
// name. flag2index()/type2name() do a linear scan of this table, so the
// order here defines the slot order of _malloc_data/_vm_data/_arena_data.
MemType2Name MemBaseline::MemType2NameMap[NUMBER_OF_MEMORY_TYPE] = {
  {mtJavaHeap,   "Java Heap"},
  {mtClass,      "Class"},
  {mtThreadStack,"Thread Stack"},
  {mtThread,     "Thread"},
  {mtCode,       "Code"},
  {mtGC,         "GC"},
  {mtCompiler,   "Compiler"},
  {mtInternal,   "Internal"},
  {mtOther,      "Other"},
  {mtSymbol,     "Symbol"},
  {mtNMT,        "Memory Tracking"},
  {mtChunk,      "Pooled Free Chunks"},
  {mtNone,       "Unknown"}  // It can happen when type tagging records are lagging
                             // behind
};
zgu@3900 46
zgu@3900 47 MemBaseline::MemBaseline() {
zgu@3900 48 _baselined = false;
zgu@3900 49
zgu@3900 50 for (int index = 0; index < NUMBER_OF_MEMORY_TYPE; index ++) {
zgu@3900 51 _malloc_data[index].set_type(MemType2NameMap[index]._flag);
zgu@3900 52 _vm_data[index].set_type(MemType2NameMap[index]._flag);
zgu@3900 53 _arena_data[index].set_type(MemType2NameMap[index]._flag);
zgu@3900 54 }
zgu@3900 55
zgu@3900 56 _malloc_cs = NULL;
zgu@3900 57 _vm_cs = NULL;
zgu@3900 58
zgu@3900 59 _number_of_classes = 0;
zgu@3900 60 _number_of_threads = 0;
zgu@3900 61 }
zgu@3900 62
zgu@3900 63
zgu@3900 64 void MemBaseline::clear() {
zgu@3900 65 if (_malloc_cs != NULL) {
zgu@3900 66 delete _malloc_cs;
zgu@3900 67 _malloc_cs = NULL;
zgu@3900 68 }
zgu@3900 69
zgu@3900 70 if (_vm_cs != NULL) {
zgu@3900 71 delete _vm_cs;
zgu@3900 72 _vm_cs = NULL;
zgu@3900 73 }
zgu@3900 74
zgu@3900 75 reset();
zgu@3900 76 }
zgu@3900 77
zgu@3900 78
zgu@3900 79 void MemBaseline::reset() {
zgu@3900 80 _baselined = false;
zgu@3900 81 _total_vm_reserved = 0;
zgu@3900 82 _total_vm_committed = 0;
zgu@3900 83 _total_malloced = 0;
zgu@3900 84 _number_of_classes = 0;
zgu@3900 85
zgu@3900 86 if (_malloc_cs != NULL) _malloc_cs->clear();
zgu@3900 87 if (_vm_cs != NULL) _vm_cs->clear();
zgu@3900 88
zgu@3900 89 for (int index = 0; index < NUMBER_OF_MEMORY_TYPE; index ++) {
zgu@3900 90 _malloc_data[index].clear();
zgu@3900 91 _vm_data[index].clear();
zgu@3900 92 _arena_data[index].clear();
zgu@3900 93 }
zgu@3900 94 }
zgu@3900 95
zgu@3900 96 MemBaseline::~MemBaseline() {
zgu@3900 97 if (_malloc_cs != NULL) {
zgu@3900 98 delete _malloc_cs;
zgu@3900 99 }
zgu@3900 100
zgu@3900 101 if (_vm_cs != NULL) {
zgu@3900 102 delete _vm_cs;
zgu@3900 103 }
zgu@3900 104 }
zgu@3900 105
zgu@3900 106 // baseline malloc'd memory records, generate overall summary and summaries by
zgu@3900 107 // memory types
zgu@3900 108 bool MemBaseline::baseline_malloc_summary(const MemPointerArray* malloc_records) {
zgu@3900 109 MemPointerArrayIteratorImpl mItr((MemPointerArray*)malloc_records);
zgu@3900 110 MemPointerRecord* mptr = (MemPointerRecord*)mItr.current();
zgu@3900 111 size_t used_arena_size = 0;
zgu@3900 112 int index;
zgu@3900 113 while (mptr != NULL) {
zgu@3900 114 index = flag2index(FLAGS_TO_MEMORY_TYPE(mptr->flags()));
zgu@3900 115 size_t size = mptr->size();
zgu@3900 116 _total_malloced += size;
zgu@3900 117 _malloc_data[index].inc(size);
zgu@3900 118 if (MemPointerRecord::is_arena_record(mptr->flags())) {
zgu@3900 119 // see if arena size record present
zgu@3900 120 MemPointerRecord* next_p = (MemPointerRecordEx*)mItr.peek_next();
zgu@3900 121 if (MemPointerRecord::is_arena_size_record(next_p->flags())) {
zgu@3900 122 assert(next_p->is_size_record_of_arena(mptr), "arena records do not match");
zgu@3900 123 size = next_p->size();
zgu@3900 124 _arena_data[index].inc(size);
zgu@3900 125 used_arena_size += size;
zgu@3900 126 mItr.next();
zgu@3900 127 }
zgu@3900 128 }
zgu@3900 129 mptr = (MemPointerRecordEx*)mItr.next();
zgu@3900 130 }
zgu@3900 131
zgu@3900 132 // substract used arena size to get size of arena chunk in free list
zgu@3900 133 index = flag2index(mtChunk);
zgu@3900 134 _malloc_data[index].reduce(used_arena_size);
zgu@3900 135 // we really don't know how many chunks in free list, so just set to
zgu@3900 136 // 0
zgu@3900 137 _malloc_data[index].overwrite_counter(0);
zgu@3900 138
zgu@3900 139 return true;
zgu@3900 140 }
zgu@3900 141
zgu@3900 142 // baseline mmap'd memory records, generate overall summary and summaries by
zgu@3900 143 // memory types
zgu@3900 144 bool MemBaseline::baseline_vm_summary(const MemPointerArray* vm_records) {
zgu@3900 145 MemPointerArrayIteratorImpl vItr((MemPointerArray*)vm_records);
zgu@3900 146 VMMemRegion* vptr = (VMMemRegion*)vItr.current();
zgu@3900 147 int index;
zgu@3900 148 while (vptr != NULL) {
zgu@3900 149 index = flag2index(FLAGS_TO_MEMORY_TYPE(vptr->flags()));
zgu@3900 150
zgu@3900 151 // we use the number of thread stack to count threads
zgu@3900 152 if (IS_MEMORY_TYPE(vptr->flags(), mtThreadStack)) {
zgu@3900 153 _number_of_threads ++;
zgu@3900 154 }
zgu@3900 155 _total_vm_reserved += vptr->reserved_size();
zgu@3900 156 _total_vm_committed += vptr->committed_size();
zgu@3900 157 _vm_data[index].inc(vptr->reserved_size(), vptr->committed_size());
zgu@3900 158 vptr = (VMMemRegion*)vItr.next();
zgu@3900 159 }
zgu@3900 160 return true;
zgu@3900 161 }
zgu@3900 162
zgu@3900 163 // baseline malloc'd memory by callsites, but only the callsites with memory allocation
zgu@3900 164 // over 1KB are stored.
// baseline malloc'd memory by callsites, but only the callsites with memory allocation
// over 1KB are stored.
// Precondition (established by baseline()): malloc_records is sorted by
// callsite pc, so records with the same pc are adjacent and can be
// accumulated into a single running MallocCallsitePointer.
bool MemBaseline::baseline_malloc_details(const MemPointerArray* malloc_records) {
  assert(MemTracker::track_callsite(), "detail tracking is off");

  MemPointerArrayIteratorImpl mItr((MemPointerArray*)malloc_records);
  MemPointerRecordEx* mptr = (MemPointerRecordEx*)mItr.current();
  // running accumulator for the current callsite; default-constructed
  // with addr 0 so the very first record starts a new callsite
  MallocCallsitePointer mp;

  // lazily allocate the callsite array, or reuse it from a prior baseline
  if (_malloc_cs == NULL) {
    _malloc_cs = new (std::nothrow) MemPointerArrayImpl<MallocCallsitePointer>(64);
    // out of native memory
    if (_malloc_cs == NULL) {
      return false;
    }
  } else {
    _malloc_cs->clear();
  }

  // baseline memory that is totaled over 1 KB
  while (mptr != NULL) {
    // arena-size records carry the matching arena record's size, not an
    // independent allocation, so they are not attributed to a callsite
    if (!MemPointerRecord::is_arena_size_record(mptr->flags())) {
      // skip thread stacks
      if (!IS_MEMORY_TYPE(mptr->flags(), mtThreadStack)) {
        if (mp.addr() != mptr->pc()) {
          // new callsite: flush the previous one, but only if it
          // accumulated at least 1 KB (also skips the initial empty mp)
          if ((mp.amount()/K) > 0) {
            if (!_malloc_cs->append(&mp)) {
              return false;
            }
          }
          mp = MallocCallsitePointer(mptr->pc());
        }
        mp.inc(mptr->size());
      }
    }
    mptr = (MemPointerRecordEx*)mItr.next();
  }

  // flush the final callsite (addr != 0 excludes the never-used initial mp)
  if (mp.addr() != 0 && (mp.amount()/K) > 0) {
    if (!_malloc_cs->append(&mp)) {
      return false;
    }
  }
  return true;
}
zgu@3900 208
zgu@3900 209 // baseline mmap'd memory by callsites
zgu@3900 210 bool MemBaseline::baseline_vm_details(const MemPointerArray* vm_records) {
zgu@3900 211 assert(MemTracker::track_callsite(), "detail tracking is off");
zgu@3900 212
zgu@3900 213 VMCallsitePointer vp;
zgu@3900 214 MemPointerArrayIteratorImpl vItr((MemPointerArray*)vm_records);
zgu@3900 215 VMMemRegionEx* vptr = (VMMemRegionEx*)vItr.current();
zgu@3900 216
zgu@3900 217 if (_vm_cs == NULL) {
zgu@3900 218 _vm_cs = new (std::nothrow) MemPointerArrayImpl<VMCallsitePointer>(64);
zgu@3900 219 if (_vm_cs == NULL) {
zgu@3900 220 return false;
zgu@3900 221 }
zgu@3900 222 } else {
zgu@3900 223 _vm_cs->clear();
zgu@3900 224 }
zgu@3900 225
zgu@3900 226 while (vptr != NULL) {
zgu@3900 227 if (vp.addr() != vptr->pc()) {
zgu@3900 228 if (!_vm_cs->append(&vp)) {
zgu@3900 229 return false;
zgu@3900 230 }
zgu@3900 231 vp = VMCallsitePointer(vptr->pc());
zgu@3900 232 }
zgu@3900 233 vp.inc(vptr->size(), vptr->committed_size());
zgu@3900 234 vptr = (VMMemRegionEx*)vItr.next();
zgu@3900 235 }
zgu@3900 236 if (vp.addr() != 0) {
zgu@3900 237 if (!_vm_cs->append(&vp)) {
zgu@3900 238 return false;
zgu@3900 239 }
zgu@3900 240 }
zgu@3900 241 return true;
zgu@3900 242 }
zgu@3900 243
zgu@3900 244 // baseline a snapshot. If summary_only = false, memory usages aggregated by
zgu@3900 245 // callsites are also baselined.
// baseline a snapshot. If summary_only = false, memory usages aggregated by
// callsites are also baselined.
// Holds the snapshot's lock for the whole operation so records cannot
// change while being summed and sorted. Returns whether baselining
// succeeded; the result is also cached in _baselined.
bool MemBaseline::baseline(MemSnapshot& snapshot, bool summary_only) {
  MutexLockerEx snapshot_locker(snapshot._lock, true);
  reset();
  _baselined = baseline_malloc_summary(snapshot._alloc_ptrs) &&
               baseline_vm_summary(snapshot._vm_ptrs);
  _number_of_classes = SystemDictionary::number_of_classes();

  if (!summary_only && MemTracker::track_callsite() && _baselined) {
    // detail baselining requires the records sorted by callsite pc so
    // equal-pc records are adjacent ...
    ((MemPointerArray*)snapshot._alloc_ptrs)->sort((FN_SORT)malloc_sort_by_pc);
    ((MemPointerArray*)snapshot._vm_ptrs)->sort((FN_SORT)vm_sort_by_pc);
    _baselined = baseline_malloc_details(snapshot._alloc_ptrs) &&
                 baseline_vm_details(snapshot._vm_ptrs);
    // ... then restore the snapshot's native address ordering
    ((MemPointerArray*)snapshot._alloc_ptrs)->sort((FN_SORT)malloc_sort_by_addr);
    ((MemPointerArray*)snapshot._vm_ptrs)->sort((FN_SORT)vm_sort_by_addr);
  }
  return _baselined;
}
zgu@3900 263
zgu@3900 264
zgu@3900 265 int MemBaseline::flag2index(MEMFLAGS flag) const {
zgu@3900 266 for (int index = 0; index < NUMBER_OF_MEMORY_TYPE; index ++) {
zgu@3900 267 if (MemType2NameMap[index]._flag == flag) {
zgu@3900 268 return index;
zgu@3900 269 }
zgu@3900 270 }
zgu@3900 271 assert(false, "no type");
zgu@3900 272 return -1;
zgu@3900 273 }
zgu@3900 274
zgu@3900 275 const char* MemBaseline::type2name(MEMFLAGS type) {
zgu@3900 276 for (int index = 0; index < NUMBER_OF_MEMORY_TYPE; index ++) {
zgu@3900 277 if (MemType2NameMap[index]._flag == type) {
zgu@3900 278 return MemType2NameMap[index]._name;
zgu@3900 279 }
zgu@3900 280 }
zgu@3900 281 assert(false, "no type");
zgu@3900 282 return NULL;
zgu@3900 283 }
zgu@3900 284
zgu@3900 285
zgu@3900 286 MemBaseline& MemBaseline::operator=(const MemBaseline& other) {
zgu@3900 287 _total_malloced = other._total_malloced;
zgu@3900 288 _total_vm_reserved = other._total_vm_reserved;
zgu@3900 289 _total_vm_committed = other._total_vm_committed;
zgu@3900 290
zgu@3900 291 _baselined = other._baselined;
zgu@3900 292 _number_of_classes = other._number_of_classes;
zgu@3900 293
zgu@3900 294 for (int index = 0; index < NUMBER_OF_MEMORY_TYPE; index ++) {
zgu@3900 295 _malloc_data[index] = other._malloc_data[index];
zgu@3900 296 _vm_data[index] = other._vm_data[index];
zgu@3900 297 _arena_data[index] = other._arena_data[index];
zgu@3900 298 }
zgu@3900 299
zgu@3900 300 if (MemTracker::track_callsite()) {
zgu@3900 301 assert(_malloc_cs != NULL && _vm_cs != NULL, "out of memory");
zgu@3900 302 assert(other._malloc_cs != NULL && other._vm_cs != NULL,
zgu@3900 303 "not properly baselined");
zgu@3900 304 _malloc_cs->clear();
zgu@3900 305 _vm_cs->clear();
zgu@3900 306 int index;
zgu@3900 307 for (index = 0; index < other._malloc_cs->length(); index ++) {
zgu@3900 308 _malloc_cs->append(other._malloc_cs->at(index));
zgu@3900 309 }
zgu@3900 310
zgu@3900 311 for (index = 0; index < other._vm_cs->length(); index ++) {
zgu@3900 312 _vm_cs->append(other._vm_cs->at(index));
zgu@3900 313 }
zgu@3900 314 }
zgu@3900 315 return *this;
zgu@3900 316 }
zgu@3900 317
zgu@3900 318 /* compare functions for sorting */
zgu@3900 319
zgu@3900 320 // sort snapshot malloc'd records in callsite pc order
zgu@3900 321 int MemBaseline::malloc_sort_by_pc(const void* p1, const void* p2) {
zgu@3900 322 assert(MemTracker::track_callsite(),"Just check");
zgu@3900 323 const MemPointerRecordEx* mp1 = (const MemPointerRecordEx*)p1;
zgu@3900 324 const MemPointerRecordEx* mp2 = (const MemPointerRecordEx*)p2;
zgu@3900 325 return UNSIGNED_COMPARE(mp1->pc(), mp2->pc());
zgu@3900 326 }
zgu@3900 327
zgu@3900 328 // sort baselined malloc'd records in size order
zgu@3900 329 int MemBaseline::bl_malloc_sort_by_size(const void* p1, const void* p2) {
zgu@3900 330 assert(MemTracker::is_on(), "Just check");
zgu@3900 331 const MallocCallsitePointer* mp1 = (const MallocCallsitePointer*)p1;
zgu@3900 332 const MallocCallsitePointer* mp2 = (const MallocCallsitePointer*)p2;
zgu@3900 333 return UNSIGNED_COMPARE(mp2->amount(), mp1->amount());
zgu@3900 334 }
zgu@3900 335
zgu@3900 336 // sort baselined malloc'd records in callsite pc order
zgu@3900 337 int MemBaseline::bl_malloc_sort_by_pc(const void* p1, const void* p2) {
zgu@3900 338 assert(MemTracker::is_on(), "Just check");
zgu@3900 339 const MallocCallsitePointer* mp1 = (const MallocCallsitePointer*)p1;
zgu@3900 340 const MallocCallsitePointer* mp2 = (const MallocCallsitePointer*)p2;
zgu@3900 341 return UNSIGNED_COMPARE(mp1->addr(), mp2->addr());
zgu@3900 342 }
zgu@3900 343
zgu@3900 344 // sort snapshot mmap'd records in callsite pc order
zgu@3900 345 int MemBaseline::vm_sort_by_pc(const void* p1, const void* p2) {
zgu@3900 346 assert(MemTracker::track_callsite(),"Just check");
zgu@3900 347 const VMMemRegionEx* mp1 = (const VMMemRegionEx*)p1;
zgu@3900 348 const VMMemRegionEx* mp2 = (const VMMemRegionEx*)p2;
zgu@3900 349 return UNSIGNED_COMPARE(mp1->pc(), mp2->pc());
zgu@3900 350 }
zgu@3900 351
zgu@3900 352 // sort baselined mmap'd records in size (reserved size) order
zgu@3900 353 int MemBaseline::bl_vm_sort_by_size(const void* p1, const void* p2) {
zgu@3900 354 assert(MemTracker::is_on(), "Just check");
zgu@3900 355 const VMCallsitePointer* mp1 = (const VMCallsitePointer*)p1;
zgu@3900 356 const VMCallsitePointer* mp2 = (const VMCallsitePointer*)p2;
zgu@3900 357 return UNSIGNED_COMPARE(mp2->reserved_amount(), mp1->reserved_amount());
zgu@3900 358 }
zgu@3900 359
zgu@3900 360 // sort baselined mmap'd records in callsite pc order
zgu@3900 361 int MemBaseline::bl_vm_sort_by_pc(const void* p1, const void* p2) {
zgu@3900 362 assert(MemTracker::is_on(), "Just check");
zgu@3900 363 const VMCallsitePointer* mp1 = (const VMCallsitePointer*)p1;
zgu@3900 364 const VMCallsitePointer* mp2 = (const VMCallsitePointer*)p2;
zgu@3900 365 return UNSIGNED_COMPARE(mp1->addr(), mp2->addr());
zgu@3900 366 }
zgu@3900 367
zgu@3900 368
zgu@3900 369 // sort snapshot malloc'd records in memory block address order
zgu@3900 370 int MemBaseline::malloc_sort_by_addr(const void* p1, const void* p2) {
zgu@3900 371 assert(MemTracker::is_on(), "Just check");
zgu@3900 372 const MemPointerRecord* mp1 = (const MemPointerRecord*)p1;
zgu@3900 373 const MemPointerRecord* mp2 = (const MemPointerRecord*)p2;
zgu@3900 374 int delta = UNSIGNED_COMPARE(mp1->addr(), mp2->addr());
zgu@3900 375 assert(delta != 0, "dup pointer");
zgu@3900 376 return delta;
zgu@3900 377 }
zgu@3900 378
zgu@3900 379 // sort snapshot mmap'd records in memory block address order
zgu@3900 380 int MemBaseline::vm_sort_by_addr(const void* p1, const void* p2) {
zgu@3900 381 assert(MemTracker::is_on(), "Just check");
zgu@3900 382 const VMMemRegion* mp1 = (const VMMemRegion*)p1;
zgu@3900 383 const VMMemRegion* mp2 = (const VMMemRegion*)p2;
zgu@3900 384 int delta = UNSIGNED_COMPARE(mp1->addr(), mp2->addr());
zgu@3900 385 assert(delta != 0, "dup pointer");
zgu@3900 386 return delta;
zgu@3900 387 }

mercurial