src/share/vm/services/memBaseline.cpp

Wed, 27 Aug 2014 08:19:12 -0400

author
zgu
date
Wed, 27 Aug 2014 08:19:12 -0400
changeset 7074
833b0f92429a
parent 5375
72fce0b2d341
child 7080
dd3939fe8424
permissions
-rw-r--r--

8046598: Scalable Native memory tracking development
Summary: Enhance scalability of native memory tracking
Reviewed-by: coleenp, ctornqvi, gtriantafill

zgu@3900 1 /*
zgu@7074 2 * Copyright (c) 2012, 2014, Oracle and/or its affiliates. All rights reserved.
zgu@3900 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
zgu@3900 4 *
zgu@3900 5 * This code is free software; you can redistribute it and/or modify it
zgu@3900 6 * under the terms of the GNU General Public License version 2 only, as
zgu@3900 7 * published by the Free Software Foundation.
zgu@3900 8 *
zgu@3900 9 * This code is distributed in the hope that it will be useful, but WITHOUT
zgu@3900 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
zgu@3900 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
zgu@3900 12 * version 2 for more details (a copy is included in the LICENSE file that
zgu@3900 13 * accompanied this code).
zgu@3900 14 *
zgu@3900 15 * You should have received a copy of the GNU General Public License version
zgu@3900 16 * 2 along with this work; if not, write to the Free Software Foundation,
zgu@3900 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
zgu@3900 18 *
zgu@3900 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
zgu@3900 20 * or visit www.oracle.com if you need additional information or have any
zgu@3900 21 * questions.
zgu@3900 22 *
zgu@3900 23 */
zgu@3900 24 #include "precompiled.hpp"
zgu@7074 25
zgu@3900 26 #include "memory/allocation.hpp"
zgu@4980 27 #include "runtime/safepoint.hpp"
zgu@4980 28 #include "runtime/thread.inline.hpp"
zgu@3900 29 #include "services/memBaseline.hpp"
zgu@3900 30 #include "services/memTracker.hpp"
zgu@3900 31
zgu@7074 32 /*
zgu@7074 33 * Sizes are sorted in descenting order for reporting
zgu@7074 34 */
zgu@7074 35 int compare_malloc_size(const MallocSite& s1, const MallocSite& s2) {
zgu@7074 36 if (s1.size() == s2.size()) {
zgu@7074 37 return 0;
zgu@7074 38 } else if (s1.size() > s2.size()) {
zgu@7074 39 return -1;
zgu@7074 40 } else {
zgu@7074 41 return 1;
zgu@3900 42 }
zgu@3900 43 }
zgu@3900 44
zgu@7074 45
zgu@7074 46 int compare_virtual_memory_size(const VirtualMemoryAllocationSite& s1,
zgu@7074 47 const VirtualMemoryAllocationSite& s2) {
zgu@7074 48 if (s1.reserved() == s2.reserved()) {
zgu@7074 49 return 0;
zgu@7074 50 } else if (s1.reserved() > s2.reserved()) {
zgu@7074 51 return -1;
zgu@7074 52 } else {
zgu@7074 53 return 1;
zgu@7074 54 }
zgu@3900 55 }
zgu@3900 56
// Sort into allocation site address order for baseline comparison.
// Orders malloc sites by their recorded native call stacks so two baselines
// can be walked in lock-step and diffed site-by-site.
int compare_malloc_site(const MallocSite& s1, const MallocSite& s2) {
  return s1.call_stack()->compare(*s2.call_stack());
}
zgu@7074 61
zgu@7074 62
// Orders virtual memory allocation sites by their recorded native call
// stacks (allocation-site order), for baseline comparison.
int compare_virtual_memory_site(const VirtualMemoryAllocationSite& s1,
                                const VirtualMemoryAllocationSite& s2) {
  return s1.call_stack()->compare(*s2.call_stack());
}
zgu@7074 67
zgu@7074 68 /*
zgu@7074 69 * Walker to walk malloc allocation site table
zgu@7074 70 */
zgu@7074 71 class MallocAllocationSiteWalker : public MallocSiteWalker {
zgu@7074 72 private:
zgu@7074 73 SortedLinkedList<MallocSite, compare_malloc_size, ResourceObj::ARENA>
zgu@7074 74 _malloc_sites;
zgu@7074 75 size_t _count;
zgu@7074 76
zgu@7074 77 // Entries in MallocSiteTable with size = 0 and count = 0,
zgu@7074 78 // when the malloc site is not longer there.
zgu@7074 79 public:
zgu@7074 80 MallocAllocationSiteWalker(Arena* arena) : _count(0), _malloc_sites(arena) {
zgu@7074 81 }
zgu@7074 82
zgu@7074 83 inline size_t count() const { return _count; }
zgu@7074 84
zgu@7074 85 LinkedList<MallocSite>* malloc_sites() {
zgu@7074 86 return &_malloc_sites;
zgu@7074 87 }
zgu@7074 88
zgu@7074 89 bool do_malloc_site(const MallocSite* site) {
zgu@7074 90 if (site->size() >= MemBaseline::SIZE_THRESHOLD) {
zgu@7074 91 if (_malloc_sites.add(*site) != NULL) {
zgu@7074 92 _count++;
zgu@7074 93 return true;
zgu@7074 94 } else {
zgu@7074 95 return false; // OOM
zgu@7074 96 }
zgu@4274 97 } else {
zgu@7074 98 // malloc site does not meet threshold, ignore and continue
zgu@7074 99 return true;
zgu@7074 100 }
zgu@7074 101 }
zgu@7074 102 };
zgu@7074 103
// Compare virtual memory regions by base address (delegates to the
// region's own compare); keeps collected regions sorted by address.
int compare_virtual_memory_base(const ReservedMemoryRegion& r1, const ReservedMemoryRegion& r2) {
  return r1.compare(r2);
}
zgu@7074 108
zgu@7074 109 // Walk all virtual memory regions for baselining
zgu@7074 110 class VirtualMemoryAllocationWalker : public VirtualMemoryWalker {
zgu@7074 111 private:
zgu@7074 112 SortedLinkedList<ReservedMemoryRegion, compare_virtual_memory_base, ResourceObj::ARENA>
zgu@7074 113 _virtual_memory_regions;
zgu@7074 114 size_t _count;
zgu@7074 115
zgu@7074 116 public:
zgu@7074 117 VirtualMemoryAllocationWalker(Arena* a) : _count(0), _virtual_memory_regions(a) {
zgu@7074 118 }
zgu@7074 119
zgu@7074 120 bool do_allocation_site(const ReservedMemoryRegion* rgn) {
zgu@7074 121 if (rgn->size() >= MemBaseline::SIZE_THRESHOLD) {
zgu@7074 122 if (_virtual_memory_regions.add(*rgn) != NULL) {
zgu@7074 123 _count ++;
zgu@7074 124 return true;
zgu@7074 125 } else {
zgu@7074 126 return false;
zgu@3900 127 }
zgu@3900 128 }
zgu@7074 129 return true;
zgu@3900 130 }
zgu@3900 131
zgu@7074 132 LinkedList<ReservedMemoryRegion>* virtual_memory_allocations() {
zgu@7074 133 return &_virtual_memory_regions;
zgu@7074 134 }
zgu@7074 135 };
zgu@7074 136
zgu@7074 137
zgu@7074 138 bool MemBaseline::baseline_summary() {
zgu@7074 139 assert(_malloc_memory_snapshot == NULL, "Malloc baseline not yet reset");
zgu@7074 140 assert(_virtual_memory_snapshot == NULL, "Virtual baseline not yet reset");
zgu@7074 141
zgu@7074 142 _malloc_memory_snapshot = new (arena()) MallocMemorySnapshot();
zgu@7074 143 _virtual_memory_snapshot = new (arena()) VirtualMemorySnapshot();
zgu@7074 144 if (_malloc_memory_snapshot == NULL || _virtual_memory_snapshot == NULL) {
zgu@7074 145 return false;
zgu@7074 146 }
zgu@7074 147 MallocMemorySummary::snapshot(_malloc_memory_snapshot);
zgu@7074 148 VirtualMemorySummary::snapshot(_virtual_memory_snapshot);
zgu@7074 149 return true;
zgu@7074 150 }
zgu@7074 151
// Baseline detail-level data: per-call-site malloc totals and per-region
// virtual memory reservations, then aggregate the regions into per-call-site
// totals. Returns false on out-of-memory during any of the walks.
bool MemBaseline::baseline_allocation_sites() {
  assert(arena() != NULL, "Just check");
  // Malloc allocation sites
  MallocAllocationSiteWalker malloc_walker(arena());
  if (!MallocSiteTable::walk_malloc_site(&malloc_walker)) {
    return false;
  }

  // Take over the walker's list head; the nodes live in this baseline's arena.
  _malloc_sites.set_head(malloc_walker.malloc_sites()->head());
  // The malloc sites are collected in size order
  _malloc_sites_order = by_size;

  // Virtual memory allocation sites
  VirtualMemoryAllocationWalker virtual_memory_walker(arena());
  if (!VirtualMemoryTracker::walk_virtual_memory(&virtual_memory_walker)) {
    return false;
  }

  // Virtual memory allocations are collected in call stack order
  _virtual_memory_allocations.set_head(virtual_memory_walker.virtual_memory_allocations()->head());

  if (!aggregate_virtual_memory_allocation_sites()) {
    return false;
  }
  // Virtual memory allocation sites are aggregated in call stack order
  _virtual_memory_sites_order = by_address;

  return true;
}
zgu@3900 181
zgu@7074 182 bool MemBaseline::baseline(bool summaryOnly) {
zgu@7074 183 if (arena() == NULL) {
zgu@7074 184 _arena = new (std::nothrow, mtNMT) Arena(mtNMT);
zgu@7074 185 if (arena() == NULL) return false;
zgu@3900 186 }
zgu@3900 187
zgu@7074 188 reset();
zgu@4193 189
zgu@7074 190 _class_count = InstanceKlass::number_of_instance_classes();
zgu@4193 191
zgu@7074 192 if (!baseline_summary()) {
zgu@4193 193 return false;
zgu@4193 194 }
zgu@4193 195
zgu@7074 196 _baseline_type = Summary_baselined;
zgu@4193 197
zgu@7074 198 // baseline details
zgu@7074 199 if (!summaryOnly &&
zgu@7074 200 MemTracker::tracking_level() == NMT_detail) {
zgu@7074 201 baseline_allocation_sites();
zgu@7074 202 _baseline_type = Detail_baselined;
zgu@4193 203 }
zgu@4193 204
zgu@3900 205 return true;
zgu@3900 206 }
zgu@3900 207
// Orders virtual memory allocation sites by call stack — the same ordering
// as compare_virtual_memory_site above; used as the sort predicate when
// aggregating reserved regions into per-call-site totals.
int compare_allocation_site(const VirtualMemoryAllocationSite& s1,
                            const VirtualMemoryAllocationSite& s2) {
  return s1.call_stack()->compare(*s2.call_stack());
}
zgu@3900 212
// Fold individual reserved regions into per-call-stack allocation sites:
// every region reserved from the same call stack contributes its reserved
// and committed sizes to a single aggregated site.
// Returns false on out-of-memory while building the site list.
bool MemBaseline::aggregate_virtual_memory_allocation_sites() {
  // Aggregated sites, kept sorted by call stack so find() can locate an
  // existing site for a region's stack.
  SortedLinkedList<VirtualMemoryAllocationSite, compare_allocation_site, ResourceObj::ARENA>
    allocation_sites(arena());

  VirtualMemoryAllocationIterator itr = virtual_memory_allocations();
  const ReservedMemoryRegion* rgn;
  VirtualMemoryAllocationSite* site;
  while ((rgn = itr.next()) != NULL) {
    // Probe with a temporary site keyed by this region's call stack.
    VirtualMemoryAllocationSite tmp(*rgn->call_stack());
    site = allocation_sites.find(tmp);
    if (site == NULL) {
      // First region from this call stack: create its aggregation site.
      LinkedListNode<VirtualMemoryAllocationSite>* node =
        allocation_sites.add(tmp);
      if (node == NULL) return false;
      site = node->data();
    }
    // Accumulate this region's sizes into the site's running totals.
    site->reserve_memory(rgn->size());
    site->commit_memory(rgn->committed_size());
  }

  // Adopt the aggregated list; nodes live in this baseline's arena.
  _virtual_memory_sites.set_head(allocation_sites.head());
  return true;
}
zgu@3900 236
zgu@7074 237 MallocSiteIterator MemBaseline::malloc_sites(SortingOrder order) {
zgu@7074 238 assert(!_malloc_sites.is_empty(), "Detail baseline?");
zgu@7074 239 switch(order) {
zgu@7074 240 case by_size:
zgu@7074 241 malloc_sites_to_size_order();
zgu@7074 242 break;
zgu@7074 243 case by_site:
zgu@7074 244 malloc_sites_to_allocation_site_order();
zgu@7074 245 break;
zgu@7074 246 case by_address:
zgu@7074 247 default:
zgu@7074 248 ShouldNotReachHere();
zgu@3900 249 }
zgu@7074 250 return MallocSiteIterator(_malloc_sites.head());
zgu@3900 251 }
zgu@3900 252
zgu@7074 253 VirtualMemorySiteIterator MemBaseline::virtual_memory_sites(SortingOrder order) {
zgu@7074 254 assert(!_virtual_memory_sites.is_empty(), "Detail baseline?");
zgu@7074 255 switch(order) {
zgu@7074 256 case by_size:
zgu@7074 257 virtual_memory_sites_to_size_order();
zgu@7074 258 break;
zgu@7074 259 case by_site:
zgu@7074 260 virtual_memory_sites_to_reservation_site_order();
zgu@7074 261 break;
zgu@7074 262 case by_address:
zgu@7074 263 default:
zgu@7074 264 ShouldNotReachHere();
zgu@3900 265 }
zgu@7074 266 return VirtualMemorySiteIterator(_virtual_memory_sites.head());
zgu@3900 267 }
zgu@3900 268
zgu@3900 269
zgu@7074 270 // Sorting allocations sites in different orders
zgu@7074 271 void MemBaseline::malloc_sites_to_size_order() {
zgu@7074 272 if (_malloc_sites_order != by_size) {
zgu@7074 273 SortedLinkedList<MallocSite, compare_malloc_size, ResourceObj::ARENA>
zgu@7074 274 tmp(arena());
zgu@7074 275
zgu@7074 276 // Add malloc sites to sorted linked list to sort into size order
zgu@7074 277 tmp.move(&_malloc_sites);
zgu@7074 278 _malloc_sites.set_head(tmp.head());
zgu@7074 279 tmp.set_head(NULL);
zgu@7074 280 _malloc_sites_order = by_size;
zgu@7074 281 }
zgu@3900 282 }
zgu@3900 283
zgu@7074 284 void MemBaseline::malloc_sites_to_allocation_site_order() {
zgu@7074 285 if (_malloc_sites_order != by_site) {
zgu@7074 286 SortedLinkedList<MallocSite, compare_malloc_site, ResourceObj::ARENA>
zgu@7074 287 tmp(arena());
zgu@7074 288 // Add malloc sites to sorted linked list to sort into site (address) order
zgu@7074 289 tmp.move(&_malloc_sites);
zgu@7074 290 _malloc_sites.set_head(tmp.head());
zgu@7074 291 tmp.set_head(NULL);
zgu@7074 292 _malloc_sites_order = by_site;
zgu@7074 293 }
zgu@3900 294 }
zgu@3900 295
zgu@7074 296 void MemBaseline::virtual_memory_sites_to_size_order() {
zgu@7074 297 if (_virtual_memory_sites_order != by_size) {
zgu@7074 298 SortedLinkedList<VirtualMemoryAllocationSite, compare_virtual_memory_size, ResourceObj::ARENA>
zgu@7074 299 tmp(arena());
zgu@7074 300
zgu@7074 301 tmp.move(&_virtual_memory_sites);
zgu@7074 302
zgu@7074 303 _virtual_memory_sites.set_head(tmp.head());
zgu@7074 304 tmp.set_head(NULL);
zgu@7074 305 _virtual_memory_sites_order = by_size;
zgu@7074 306 }
zgu@3900 307 }
zgu@3900 308
zgu@7074 309 void MemBaseline::virtual_memory_sites_to_reservation_site_order() {
zgu@7074 310 if (_virtual_memory_sites_order != by_size) {
zgu@7074 311 SortedLinkedList<VirtualMemoryAllocationSite, compare_virtual_memory_site, ResourceObj::ARENA>
zgu@7074 312 tmp(arena());
zgu@3900 313
zgu@7074 314 tmp.add(&_virtual_memory_sites);
zgu@7074 315
zgu@7074 316 _virtual_memory_sites.set_head(tmp.head());
zgu@7074 317 tmp.set_head(NULL);
zgu@7074 318
zgu@7074 319 _virtual_memory_sites_order = by_size;
zgu@7074 320 }
zgu@3900 321 }
zgu@3900 322

mercurial