src/share/vm/services/memBaseline.cpp

author       zgu
date         Wed, 20 Aug 2014 08:41:15 -0400
changeset    7080  dd3939fe8424
parent       7074  833b0f92429a
child        7535  7ae4e26cb1e0
child        9054  db49d511817a
permissions  -rw-r--r--

8054546: NMT2 leaks memory
Summary: Fixed memory leak in NMT by baselining memory in c heap instead of an arena.
Reviewed-by: coleenp, minqi
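
The summary above gives the shape of the fix: baseline data that used to live in an arena is now kept in C heap, where each record can be freed when the baseline is reset. As a rough, self-contained illustration of why that matters (all names below are invented for the example; this is not the HotSpot code), a C-heap backed list owns every node individually and can release them all on reset, whereas arena-allocated nodes are only reclaimed with the whole arena, so repeated baselines keep growing it:

// Illustrative sketch only; BaselineList and Record are made-up stand-ins,
// not HotSpot classes, and plain malloc/free stands in for the VM's C-heap
// allocation routines.
#include <cstddef>
#include <cstdlib>

struct Record { size_t size; Record* next; };

class BaselineList {
  Record* _head;
 public:
  BaselineList() : _head(nullptr) { }
  ~BaselineList() { clear(); }

  // Each node is an individual C-heap allocation; on failure the caller can
  // report OOM, just as the walkers below return false.
  Record* add(size_t size) {
    Record* r = static_cast<Record*>(std::malloc(sizeof(Record)));
    if (r == nullptr) return nullptr;
    r->size = size;
    r->next = _head;
    _head = r;
    return r;
  }

  // Releases every node, so taking a fresh baseline does not accumulate memory.
  void clear() {
    while (_head != nullptr) {
      Record* next = _head->next;
      std::free(_head);
      _head = next;
    }
  }
};

int main() {
  BaselineList baseline;
  baseline.add(1024);
  baseline.clear();   // a new baseline can start from scratch without leaking
  return 0;
}

The SortedLinkedList members this changeset introduces (for malloc sites, virtual memory regions, and aggregated sites) presumably follow the same ownership idea: the list owns its nodes outright, so move() and reset() can transfer or release them without leaving anything behind in an arena.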

zgu@3900 1 /*
zgu@7074 2 * Copyright (c) 2012, 2014, Oracle and/or its affiliates. All rights reserved.
zgu@3900 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
zgu@3900 4 *
zgu@3900 5 * This code is free software; you can redistribute it and/or modify it
zgu@3900 6 * under the terms of the GNU General Public License version 2 only, as
zgu@3900 7 * published by the Free Software Foundation.
zgu@3900 8 *
zgu@3900 9 * This code is distributed in the hope that it will be useful, but WITHOUT
zgu@3900 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
zgu@3900 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
zgu@3900 12 * version 2 for more details (a copy is included in the LICENSE file that
zgu@3900 13 * accompanied this code).
zgu@3900 14 *
zgu@3900 15 * You should have received a copy of the GNU General Public License version
zgu@3900 16 * 2 along with this work; if not, write to the Free Software Foundation,
zgu@3900 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
zgu@3900 18 *
zgu@3900 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
zgu@3900 20 * or visit www.oracle.com if you need additional information or have any
zgu@3900 21 * questions.
zgu@3900 22 *
zgu@3900 23 */
zgu@3900 24 #include "precompiled.hpp"
zgu@7074 25
zgu@3900 26 #include "memory/allocation.hpp"
zgu@4980 27 #include "runtime/safepoint.hpp"
zgu@4980 28 #include "runtime/thread.inline.hpp"
zgu@3900 29 #include "services/memBaseline.hpp"
zgu@3900 30 #include "services/memTracker.hpp"
zgu@3900 31
zgu@7074 32 /*
zgu@7074 33 * Sizes are sorted in descending order for reporting
zgu@7074 34 */
zgu@7074 35 int compare_malloc_size(const MallocSite& s1, const MallocSite& s2) {
zgu@7074 36 if (s1.size() == s2.size()) {
zgu@7074 37 return 0;
zgu@7074 38 } else if (s1.size() > s2.size()) {
zgu@7074 39 return -1;
zgu@7074 40 } else {
zgu@7074 41 return 1;
zgu@3900 42 }
zgu@3900 43 }
zgu@3900 44
zgu@7074 45
zgu@7074 46 int compare_virtual_memory_size(const VirtualMemoryAllocationSite& s1,
zgu@7074 47 const VirtualMemoryAllocationSite& s2) {
zgu@7074 48 if (s1.reserved() == s2.reserved()) {
zgu@7074 49 return 0;
zgu@7074 50 } else if (s1.reserved() > s2.reserved()) {
zgu@7074 51 return -1;
zgu@7074 52 } else {
zgu@7074 53 return 1;
zgu@7074 54 }
zgu@3900 55 }
zgu@3900 56
zgu@7074 57 // Sort into allocation site address order for baseline comparison
zgu@7074 58 int compare_malloc_site(const MallocSite& s1, const MallocSite& s2) {
zgu@7074 59 return s1.call_stack()->compare(*s2.call_stack());
zgu@7074 60 }
zgu@7074 61
zgu@7074 62
zgu@7074 63 int compare_virtual_memory_site(const VirtualMemoryAllocationSite& s1,
zgu@7074 64 const VirtualMemoryAllocationSite& s2) {
zgu@7074 65 return s1.call_stack()->compare(*s2.call_stack());
zgu@7074 66 }
zgu@7074 67
zgu@7074 68 /*
zgu@7074 69 * Walker to walk malloc allocation site table
zgu@7074 70 */
zgu@7074 71 class MallocAllocationSiteWalker : public MallocSiteWalker {
zgu@7074 72 private:
zgu@7080 73 SortedLinkedList<MallocSite, compare_malloc_size> _malloc_sites;
zgu@7074 74 size_t _count;
zgu@7074 75
zgu@7074 76 // Entries in MallocSiteTable with size = 0 and count = 0 appear
zgu@7074 77 // when the malloc site is no longer there.
zgu@7074 78 public:
zgu@7080 79 MallocAllocationSiteWalker() : _count(0) { }
zgu@7074 80
zgu@7074 81 inline size_t count() const { return _count; }
zgu@7074 82
zgu@7074 83 LinkedList<MallocSite>* malloc_sites() {
zgu@7074 84 return &_malloc_sites;
zgu@7074 85 }
zgu@7074 86
zgu@7074 87 bool do_malloc_site(const MallocSite* site) {
zgu@7074 88 if (site->size() >= MemBaseline::SIZE_THRESHOLD) {
zgu@7074 89 if (_malloc_sites.add(*site) != NULL) {
zgu@7074 90 _count++;
zgu@7074 91 return true;
zgu@7074 92 } else {
zgu@7074 93 return false; // OOM
zgu@7074 94 }
zgu@4274 95 } else {
zgu@7074 96 // malloc site does not meet threshold, ignore and continue
zgu@7074 97 return true;
zgu@7074 98 }
zgu@7074 99 }
zgu@7074 100 };
zgu@7074 101
zgu@7074 102 // Compare virtual memory region's base address
zgu@7074 103 int compare_virtual_memory_base(const ReservedMemoryRegion& r1, const ReservedMemoryRegion& r2) {
zgu@7074 104 return r1.compare(r2);
zgu@7074 105 }
zgu@7074 106
zgu@7074 107 // Walk all virtual memory regions for baselining
zgu@7074 108 class VirtualMemoryAllocationWalker : public VirtualMemoryWalker {
zgu@7074 109 private:
zgu@7080 110 SortedLinkedList<ReservedMemoryRegion, compare_virtual_memory_base>
zgu@7074 111 _virtual_memory_regions;
zgu@7074 112 size_t _count;
zgu@7074 113
zgu@7074 114 public:
zgu@7080 115 VirtualMemoryAllocationWalker() : _count(0) { }
zgu@7074 116
zgu@7074 117 bool do_allocation_site(const ReservedMemoryRegion* rgn) {
zgu@7074 118 if (rgn->size() >= MemBaseline::SIZE_THRESHOLD) {
zgu@7074 119 if (_virtual_memory_regions.add(*rgn) != NULL) {
zgu@7074 120 _count ++;
zgu@7074 121 return true;
zgu@7074 122 } else {
zgu@7074 123 return false;
zgu@3900 124 }
zgu@3900 125 }
zgu@7074 126 return true;
zgu@3900 127 }
zgu@3900 128
zgu@7074 129 LinkedList<ReservedMemoryRegion>* virtual_memory_allocations() {
zgu@7074 130 return &_virtual_memory_regions;
zgu@7074 131 }
zgu@7074 132 };
zgu@7074 133
zgu@7074 134
zgu@7074 135 bool MemBaseline::baseline_summary() {
zgu@7080 136 MallocMemorySummary::snapshot(&_malloc_memory_snapshot);
zgu@7080 137 VirtualMemorySummary::snapshot(&_virtual_memory_snapshot);
zgu@7074 138 return true;
zgu@7074 139 }
zgu@7074 140
zgu@7074 141 bool MemBaseline::baseline_allocation_sites() {
zgu@7074 142 // Malloc allocation sites
zgu@7080 143 MallocAllocationSiteWalker malloc_walker;
zgu@7074 144 if (!MallocSiteTable::walk_malloc_site(&malloc_walker)) {
zgu@7074 145 return false;
zgu@7074 146 }
zgu@7074 147
zgu@7080 148 _malloc_sites.move(malloc_walker.malloc_sites());
zgu@7074 149 // The malloc sites are collected in size order
zgu@7074 150 _malloc_sites_order = by_size;
zgu@7074 151
zgu@7074 152 // Virtual memory allocation sites
zgu@7080 153 VirtualMemoryAllocationWalker virtual_memory_walker;
zgu@7074 154 if (!VirtualMemoryTracker::walk_virtual_memory(&virtual_memory_walker)) {
zgu@7074 155 return false;
zgu@7074 156 }
zgu@7074 157
zgu@7074 158 // Virtual memory allocations are collected in call stack order
zgu@7080 159 _virtual_memory_allocations.move(virtual_memory_walker.virtual_memory_allocations());
zgu@7074 160
zgu@7074 161 if (!aggregate_virtual_memory_allocation_sites()) {
zgu@7074 162 return false;
zgu@7074 163 }
zgu@7074 164 // Virtual memory allocation sites are aggregated in call stack order
zgu@7074 165 _virtual_memory_sites_order = by_address;
zgu@3900 166
zgu@3900 167 return true;
zgu@3900 168 }
zgu@3900 169
zgu@7074 170 bool MemBaseline::baseline(bool summaryOnly) {
zgu@7074 171 reset();
zgu@4193 172
zgu@7074 173 _class_count = InstanceKlass::number_of_instance_classes();
zgu@4193 174
zgu@7074 175 if (!baseline_summary()) {
zgu@4193 176 return false;
zgu@4193 177 }
zgu@4193 178
zgu@7074 179 _baseline_type = Summary_baselined;
zgu@4193 180
zgu@7074 181 // baseline details
zgu@7074 182 if (!summaryOnly &&
zgu@7074 183 MemTracker::tracking_level() == NMT_detail) {
zgu@7074 184 baseline_allocation_sites();
zgu@7074 185 _baseline_type = Detail_baselined;
zgu@4193 186 }
zgu@4193 187
zgu@3900 188 return true;
zgu@3900 189 }
zgu@3900 190
zgu@7074 191 int compare_allocation_site(const VirtualMemoryAllocationSite& s1,
zgu@7074 192 const VirtualMemoryAllocationSite& s2) {
zgu@7074 193 return s1.call_stack()->compare(*s2.call_stack());
zgu@3900 194 }
zgu@3900 195
zgu@7074 196 bool MemBaseline::aggregate_virtual_memory_allocation_sites() {
zgu@7080 197 SortedLinkedList<VirtualMemoryAllocationSite, compare_allocation_site> allocation_sites;
zgu@3900 198
zgu@7074 199 VirtualMemoryAllocationIterator itr = virtual_memory_allocations();
zgu@7074 200 const ReservedMemoryRegion* rgn;
zgu@7074 201 VirtualMemoryAllocationSite* site;
zgu@7074 202 while ((rgn = itr.next()) != NULL) {
zgu@7074 203 VirtualMemoryAllocationSite tmp(*rgn->call_stack());
zgu@7074 204 site = allocation_sites.find(tmp);
zgu@7074 205 if (site == NULL) {
zgu@7074 206 LinkedListNode<VirtualMemoryAllocationSite>* node =
zgu@7074 207 allocation_sites.add(tmp);
zgu@7074 208 if (node == NULL) return false;
zgu@7074 209 site = node->data();
zgu@3900 210 }
zgu@7074 211 site->reserve_memory(rgn->size());
zgu@7074 212 site->commit_memory(rgn->committed_size());
zgu@3900 213 }
zgu@7074 214
zgu@7080 215 _virtual_memory_sites.move(&allocation_sites);
zgu@7074 216 return true;
zgu@3900 217 }
zgu@3900 218
zgu@7074 219 MallocSiteIterator MemBaseline::malloc_sites(SortingOrder order) {
zgu@7080 220 assert(!_malloc_sites.is_empty(), "Not detail baseline");
zgu@7074 221 switch(order) {
zgu@7074 222 case by_size:
zgu@7074 223 malloc_sites_to_size_order();
zgu@7074 224 break;
zgu@7074 225 case by_site:
zgu@7074 226 malloc_sites_to_allocation_site_order();
zgu@7074 227 break;
zgu@7074 228 case by_address:
zgu@7074 229 default:
zgu@7074 230 ShouldNotReachHere();
zgu@3900 231 }
zgu@7074 232 return MallocSiteIterator(_malloc_sites.head());
zgu@3900 233 }
zgu@3900 234
zgu@7074 235 VirtualMemorySiteIterator MemBaseline::virtual_memory_sites(SortingOrder order) {
zgu@7080 236 assert(!_virtual_memory_sites.is_empty(), "Not detail baseline");
zgu@7074 237 switch(order) {
zgu@7074 238 case by_size:
zgu@7074 239 virtual_memory_sites_to_size_order();
zgu@7074 240 break;
zgu@7074 241 case by_site:
zgu@7074 242 virtual_memory_sites_to_reservation_site_order();
zgu@7074 243 break;
zgu@7074 244 case by_address:
zgu@7074 245 default:
zgu@7074 246 ShouldNotReachHere();
zgu@3900 247 }
zgu@7074 248 return VirtualMemorySiteIterator(_virtual_memory_sites.head());
zgu@3900 249 }
zgu@3900 250
zgu@3900 251
zgu@7074 252 // Sorting allocations sites in different orders
zgu@7074 253 void MemBaseline::malloc_sites_to_size_order() {
zgu@7074 254 if (_malloc_sites_order != by_size) {
zgu@7080 255 SortedLinkedList<MallocSite, compare_malloc_size> tmp;
zgu@7074 256
zgu@7074 257 // Add malloc sites to sorted linked list to sort into size order
zgu@7074 258 tmp.move(&_malloc_sites);
zgu@7074 259 _malloc_sites.set_head(tmp.head());
zgu@7074 260 tmp.set_head(NULL);
zgu@7074 261 _malloc_sites_order = by_size;
zgu@7074 262 }
zgu@3900 263 }
zgu@3900 264
zgu@7074 265 void MemBaseline::malloc_sites_to_allocation_site_order() {
zgu@7074 266 if (_malloc_sites_order != by_site) {
zgu@7080 267 SortedLinkedList<MallocSite, compare_malloc_site> tmp;
zgu@7074 268 // Add malloc sites to sorted linked list to sort into site (address) order
zgu@7074 269 tmp.move(&_malloc_sites);
zgu@7074 270 _malloc_sites.set_head(tmp.head());
zgu@7074 271 tmp.set_head(NULL);
zgu@7074 272 _malloc_sites_order = by_site;
zgu@7074 273 }
zgu@3900 274 }
zgu@3900 275
zgu@7074 276 void MemBaseline::virtual_memory_sites_to_size_order() {
zgu@7074 277 if (_virtual_memory_sites_order != by_size) {
zgu@7080 278 SortedLinkedList<VirtualMemoryAllocationSite, compare_virtual_memory_size> tmp;
zgu@7074 279
zgu@7074 280 tmp.move(&_virtual_memory_sites);
zgu@7074 281
zgu@7074 282 _virtual_memory_sites.set_head(tmp.head());
zgu@7074 283 tmp.set_head(NULL);
zgu@7074 284 _virtual_memory_sites_order = by_size;
zgu@7074 285 }
zgu@3900 286 }
zgu@3900 287
zgu@7074 288 void MemBaseline::virtual_memory_sites_to_reservation_site_order() {
zgu@7074 289 if (_virtual_memory_sites_order != by_site) {
zgu@7080 290 SortedLinkedList<VirtualMemoryAllocationSite, compare_virtual_memory_site> tmp;
zgu@3900 291
zgu@7080 292 tmp.move(&_virtual_memory_sites);
zgu@7074 293
zgu@7074 294 _virtual_memory_sites.set_head(tmp.head());
zgu@7074 295 tmp.set_head(NULL);
zgu@7074 296
zgu@7074 297 _virtual_memory_sites_order = by_site;
zgu@7074 298 }
zgu@3900 299 }
zgu@3900 300
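
For reference, the lazy re-sorting pattern implemented by malloc_sites_to_size_order() and its siblings can be sketched in isolation. The sketch below is illustrative only: Site, SiteList and Order are invented stand-ins, and std::list replaces HotSpot's SortedLinkedList, but the control flow (remember the list's current order and re-sort only when a report asks for a different one) mirrors the functions above.

// Standalone sketch of the lazy re-sorting idea; not HotSpot code.
#include <cstddef>
#include <cstdint>
#include <list>

struct Site { size_t size; uintptr_t stack; };

enum Order { by_address, by_size, by_site };

class SiteList {
  std::list<Site> _sites;
  Order           _order;
 public:
  SiteList() : _order(by_address) { }

  void add(const Site& s) { _sites.push_back(s); _order = by_address; }

  // Re-sort only when the requested order differs from the current one,
  // mirroring malloc_sites_to_size_order() / malloc_sites_to_allocation_site_order().
  const std::list<Site>& in_order(Order wanted) {
    if (_order != wanted) {
      if (wanted == by_size) {
        // descending size, like compare_malloc_size()
        _sites.sort([](const Site& a, const Site& b) { return a.size > b.size; });
      } else if (wanted == by_site) {
        // call-stack order, like compare_malloc_site()
        _sites.sort([](const Site& a, const Site& b) { return a.stack < b.stack; });
      }
      _order = wanted;
    }
    return _sites;
  }
};

int main() {
  SiteList sites;
  sites.add(Site{ 4096, 0x1000 });
  sites.add(Site{  128, 0x2000 });
  const std::list<Site>& sized = sites.in_order(by_size);   // largest first
  return sized.front().size == 4096 ? 0 : 1;
}

MemBaseline gets the same effect without std::list by moving its nodes into a SortedLinkedList parameterized with a different comparator, which re-inserts each node in the new order and then hands the head back to the original list.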
