src/share/vm/services/memBaseline.cpp

Wed, 27 Aug 2014 08:19:12 -0400

author
zgu
date
Wed, 27 Aug 2014 08:19:12 -0400
changeset 7074
833b0f92429a
parent 5375
72fce0b2d341
child 7080
dd3939fe8424
permissions
-rw-r--r--

8046598: Scalable Native memory tracking development
Summary: Enhance scalability of native memory tracking
Reviewed-by: coleenp, ctornqvi, gtriantafill

     1 /*
     2  * Copyright (c) 2012, 2014, Oracle and/or its affiliates. All rights reserved.
     3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4  *
     5  * This code is free software; you can redistribute it and/or modify it
     6  * under the terms of the GNU General Public License version 2 only, as
     7  * published by the Free Software Foundation.
     8  *
     9  * This code is distributed in the hope that it will be useful, but WITHOUT
    10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    12  * version 2 for more details (a copy is included in the LICENSE file that
    13  * accompanied this code).
    14  *
    15  * You should have received a copy of the GNU General Public License version
    16  * 2 along with this work; if not, write to the Free Software Foundation,
    17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    18  *
    19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
    20  * or visit www.oracle.com if you need additional information or have any
    21  * questions.
    22  *
    23  */
    24 #include "precompiled.hpp"
    26 #include "memory/allocation.hpp"
    27 #include "runtime/safepoint.hpp"
    28 #include "runtime/thread.inline.hpp"
    29 #include "services/memBaseline.hpp"
    30 #include "services/memTracker.hpp"
/*
 * Sizes are sorted in descending order for reporting
 */
    35 int compare_malloc_size(const MallocSite& s1, const MallocSite& s2) {
    36   if (s1.size() == s2.size()) {
    37     return 0;
    38   } else if (s1.size() > s2.size()) {
    39     return -1;
    40   } else {
    41     return 1;
    42   }
    43 }
    46 int compare_virtual_memory_size(const VirtualMemoryAllocationSite& s1,
    47   const VirtualMemoryAllocationSite& s2) {
    48   if (s1.reserved() == s2.reserved()) {
    49     return 0;
    50   } else if (s1.reserved() > s2.reserved()) {
    51     return -1;
    52   } else {
    53     return 1;
    54   }
    55 }
    57 // Sort into allocation site addresses order for baseline comparison
    58 int compare_malloc_site(const MallocSite& s1, const MallocSite& s2) {
    59   return s1.call_stack()->compare(*s2.call_stack());
    60 }
    63 int compare_virtual_memory_site(const VirtualMemoryAllocationSite& s1,
    64   const VirtualMemoryAllocationSite& s2) {
    65   return s1.call_stack()->compare(*s2.call_stack());
    66 }
    68 /*
    69  * Walker to walk malloc allocation site table
    70  */
    71 class MallocAllocationSiteWalker : public MallocSiteWalker {
    72  private:
    73   SortedLinkedList<MallocSite, compare_malloc_size, ResourceObj::ARENA>
    74                  _malloc_sites;
    75   size_t         _count;
    77   // Entries in MallocSiteTable with size = 0 and count = 0,
    78   // when the malloc site is not longer there.
    79  public:
    80   MallocAllocationSiteWalker(Arena* arena) : _count(0), _malloc_sites(arena) {
    81   }
    83   inline size_t count() const { return _count; }
    85   LinkedList<MallocSite>* malloc_sites() {
    86     return &_malloc_sites;
    87   }
    89   bool do_malloc_site(const MallocSite* site) {
    90     if (site->size() >= MemBaseline::SIZE_THRESHOLD) {
    91       if (_malloc_sites.add(*site) != NULL) {
    92         _count++;
    93         return true;
    94       } else {
    95         return false;  // OOM
    96       }
    97     } else {
    98       // malloc site does not meet threshold, ignore and continue
    99       return true;
   100     }
   101   }
   102 };
   104 // Compare virtual memory region's base address
   105 int compare_virtual_memory_base(const ReservedMemoryRegion& r1, const ReservedMemoryRegion& r2) {
   106   return r1.compare(r2);
   107 }
   109 // Walk all virtual memory regions for baselining
   110 class VirtualMemoryAllocationWalker : public VirtualMemoryWalker {
   111  private:
   112   SortedLinkedList<ReservedMemoryRegion, compare_virtual_memory_base, ResourceObj::ARENA>
   113                 _virtual_memory_regions;
   114   size_t        _count;
   116  public:
   117   VirtualMemoryAllocationWalker(Arena* a) : _count(0), _virtual_memory_regions(a) {
   118   }
   120   bool do_allocation_site(const ReservedMemoryRegion* rgn)  {
   121     if (rgn->size() >= MemBaseline::SIZE_THRESHOLD) {
   122       if (_virtual_memory_regions.add(*rgn) != NULL) {
   123         _count ++;
   124         return true;
   125       } else {
   126         return false;
   127       }
   128     }
   129     return true;
   130   }
   132   LinkedList<ReservedMemoryRegion>* virtual_memory_allocations() {
   133     return &_virtual_memory_regions;
   134   }
   135 };
   138 bool MemBaseline::baseline_summary() {
   139   assert(_malloc_memory_snapshot == NULL, "Malloc baseline not yet reset");
   140   assert(_virtual_memory_snapshot == NULL, "Virtual baseline not yet reset");
   142   _malloc_memory_snapshot =  new (arena()) MallocMemorySnapshot();
   143   _virtual_memory_snapshot = new (arena()) VirtualMemorySnapshot();
   144   if (_malloc_memory_snapshot == NULL || _virtual_memory_snapshot == NULL) {
   145     return false;
   146   }
   147   MallocMemorySummary::snapshot(_malloc_memory_snapshot);
   148   VirtualMemorySummary::snapshot(_virtual_memory_snapshot);
   149   return true;
   150 }
// Capture detail (per allocation site) baseline data: walk the malloc
// site table and the virtual memory tracker, then adopt the walkers'
// arena-backed lists by head-pointer transfer.
// Returns false if any walk or the aggregation step fails (e.g. OOM).
bool MemBaseline::baseline_allocation_sites() {
  assert(arena() != NULL, "Just check");
  // Malloc allocation sites
  MallocAllocationSiteWalker malloc_walker(arena());
  if (!MallocSiteTable::walk_malloc_site(&malloc_walker)) {
    return false;
  }

  // Adopt the walker's list; nodes are allocated in this baseline's arena.
  _malloc_sites.set_head(malloc_walker.malloc_sites()->head());
  // The malloc sites are collected in size order
  _malloc_sites_order = by_size;

  // Virtual memory allocation sites
  VirtualMemoryAllocationWalker virtual_memory_walker(arena());
  if (!VirtualMemoryTracker::walk_virtual_memory(&virtual_memory_walker)) {
    return false;
  }

  // Virtual memory regions are collected in base address order
  // (see compare_virtual_memory_base used by the walker above).
  _virtual_memory_allocations.set_head(virtual_memory_walker.virtual_memory_allocations()->head());

  // Fold per-region records into per-call-stack allocation sites.
  if (!aggregate_virtual_memory_allocation_sites()) {
    return false;
  }
  // Virtual memory allocation sites are aggregated in call stack order
  _virtual_memory_sites_order = by_address;

  return true;
}
   182 bool MemBaseline::baseline(bool summaryOnly) {
   183   if (arena() == NULL) {
   184     _arena = new (std::nothrow, mtNMT) Arena(mtNMT);
   185     if (arena() == NULL) return false;
   186   }
   188   reset();
   190   _class_count = InstanceKlass::number_of_instance_classes();
   192   if (!baseline_summary()) {
   193     return false;
   194   }
   196   _baseline_type = Summary_baselined;
   198   // baseline details
   199   if (!summaryOnly &&
   200       MemTracker::tracking_level() == NMT_detail) {
   201     baseline_allocation_sites();
   202     _baseline_type = Detail_baselined;
   203   }
   205   return true;
   206 }
   208 int compare_allocation_site(const VirtualMemoryAllocationSite& s1,
   209   const VirtualMemoryAllocationSite& s2) {
   210   return s1.call_stack()->compare(*s2.call_stack());
   211 }
// Aggregate the per-region virtual memory records into per-call-stack
// allocation sites, accumulating reserved and committed sizes per site.
// The aggregated list (in call stack order) becomes _virtual_memory_sites.
// Returns false on allocation failure.
bool MemBaseline::aggregate_virtual_memory_allocation_sites() {
  SortedLinkedList<VirtualMemoryAllocationSite, compare_allocation_site, ResourceObj::ARENA>
    allocation_sites(arena());

  VirtualMemoryAllocationIterator itr = virtual_memory_allocations();
  const ReservedMemoryRegion* rgn;
  VirtualMemoryAllocationSite* site;
  while ((rgn = itr.next()) != NULL) {
    // Key on the region's reservation call stack: reuse the existing
    // site for this stack, or insert a new one.
    VirtualMemoryAllocationSite tmp(*rgn->call_stack());
    site = allocation_sites.find(tmp);
    if (site == NULL) {
      LinkedListNode<VirtualMemoryAllocationSite>* node =
        allocation_sites.add(tmp);
      if (node == NULL) return false;   // OOM
      site = node->data();
    }
    // Fold this region's footprint into the site's totals.
    site->reserve_memory(rgn->size());
    site->commit_memory(rgn->committed_size());
  }

  // Adopt the aggregated list by head-pointer transfer; nodes live in
  // this baseline's arena, so no further copying is needed.
  _virtual_memory_sites.set_head(allocation_sites.head());
  return true;
}
   237 MallocSiteIterator MemBaseline::malloc_sites(SortingOrder order) {
   238   assert(!_malloc_sites.is_empty(), "Detail baseline?");
   239   switch(order) {
   240     case by_size:
   241       malloc_sites_to_size_order();
   242       break;
   243     case by_site:
   244       malloc_sites_to_allocation_site_order();
   245       break;
   246     case by_address:
   247     default:
   248       ShouldNotReachHere();
   249   }
   250   return MallocSiteIterator(_malloc_sites.head());
   251 }
   253 VirtualMemorySiteIterator MemBaseline::virtual_memory_sites(SortingOrder order) {
   254   assert(!_virtual_memory_sites.is_empty(), "Detail baseline?");
   255   switch(order) {
   256     case by_size:
   257       virtual_memory_sites_to_size_order();
   258       break;
   259     case by_site:
   260       virtual_memory_sites_to_reservation_site_order();
   261       break;
   262     case by_address:
   263     default:
   264       ShouldNotReachHere();
   265   }
   266   return VirtualMemorySiteIterator(_virtual_memory_sites.head());
   267 }
// Sorting allocation sites into different orders

// Re-sort _malloc_sites into descending size order, unless it is
// already in that order.
void MemBaseline::malloc_sites_to_size_order() {
  if (_malloc_sites_order != by_size) {
    SortedLinkedList<MallocSite, compare_malloc_size, ResourceObj::ARENA>
      tmp(arena());

    // Add malloc sites to sorted linked list to sort into size order
    tmp.move(&_malloc_sites);
    // Adopt the sorted nodes by head-pointer transfer, then clear tmp's
    // head -- presumably so the temporary list does not retain the
    // (arena-allocated) nodes; verify against SortedLinkedList.
    _malloc_sites.set_head(tmp.head());
    tmp.set_head(NULL);
    _malloc_sites_order = by_size;
  }
}
// Re-sort _malloc_sites into allocation site (call stack address) order,
// unless it is already in that order.
void MemBaseline::malloc_sites_to_allocation_site_order() {
  if (_malloc_sites_order != by_site) {
    SortedLinkedList<MallocSite, compare_malloc_site, ResourceObj::ARENA>
      tmp(arena());

    // Add malloc sites to sorted linked list to sort into site (address) order
    tmp.move(&_malloc_sites);
    // Adopt the sorted nodes by head-pointer transfer; clearing tmp's
    // head detaches the temporary list from the arena-allocated nodes.
    _malloc_sites.set_head(tmp.head());
    tmp.set_head(NULL);
    _malloc_sites_order = by_site;
  }
}
// Re-sort _virtual_memory_sites into descending reserved-size order,
// unless it is already in that order.
void MemBaseline::virtual_memory_sites_to_size_order() {
  if (_virtual_memory_sites_order != by_size) {
    SortedLinkedList<VirtualMemoryAllocationSite, compare_virtual_memory_size, ResourceObj::ARENA>
      tmp(arena());

    // Moving through the sorted list re-orders the sites by size.
    tmp.move(&_virtual_memory_sites);

    // Adopt the sorted nodes by head-pointer transfer; clearing tmp's
    // head detaches the temporary list from the arena-allocated nodes.
    _virtual_memory_sites.set_head(tmp.head());
    tmp.set_head(NULL);
    _virtual_memory_sites_order = by_size;
  }
}
   309 void MemBaseline::virtual_memory_sites_to_reservation_site_order() {
   310   if (_virtual_memory_sites_order != by_size) {
   311     SortedLinkedList<VirtualMemoryAllocationSite, compare_virtual_memory_site, ResourceObj::ARENA>
   312       tmp(arena());
   314     tmp.add(&_virtual_memory_sites);
   316     _virtual_memory_sites.set_head(tmp.head());
   317     tmp.set_head(NULL);
   319     _virtual_memory_sites_order = by_size;
   320   }
   321 }

mercurial