src/share/vm/memory/metaspace.hpp

author       coleenp
date         Fri, 25 Oct 2013 15:19:29 -0400
changeset    6029:209aa13ab8c0
parent       6027:a6177f601c64
child        6305:40353abd7984
child        6487:15120a36272d
permissions  -rw-r--r--

8024927: Nashorn performance regression with CompressedOops
Summary: Allocate compressed class space at end of Java heap. For small heap sizes, without CDS, save some space so compressed classes can have the same favorable compression as oops
Reviewed-by: stefank, hseigel, goetz
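
The regression and the fix come down to how compressed class pointers are decoded. Below is a minimal, standalone sketch of the decode arithmetic (illustrative names, not HotSpot source): a 32-bit "narrow" value is turned back into a full pointer as base + (narrow << shift). When the compressed class space can be placed right after the Java heap and the whole range still fits in the low part of the address space, the base and shift can often stay zero, so class-pointer decoding is as cheap as zero-based compressed-oop decoding, which is the "favorable compression" the summary refers to.

// Standalone sketch of narrow-pointer decoding as used for compressed oops
// and compressed class pointers. Illustrative only; not VM code.
#include <cstdint>
#include <cstdio>

static inline uint64_t decode(uint32_t narrow, uint64_t base, int shift) {
  // A 32-bit narrow value addresses up to (4 GB << shift) starting at base.
  return base + (static_cast<uint64_t>(narrow) << shift);
}

int main() {
  // Zero base and zero shift: decoding is just a zero-extension of the
  // 32-bit value, the cheapest possible form.
  std::printf("%#llx\n", (unsigned long long)decode(0x100000u, 0, 0));
  // Once the encoded range no longer fits in the low 4 GB, a non-zero base
  // and/or a shift is needed and every decode pays for the add and shift.
  std::printf("%#llx\n", (unsigned long long)decode(0x100000u, 0x800000000ULL, 3));
  return 0;
}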

     1 /*
     2  * Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
     3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4  *
     5  * This code is free software; you can redistribute it and/or modify it
     6  * under the terms of the GNU General Public License version 2 only, as
     7  * published by the Free Software Foundation.
     8  *
     9  * This code is distributed in the hope that it will be useful, but WITHOUT
    10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    12  * version 2 for more details (a copy is included in the LICENSE file that
    13  * accompanied this code).
    14  *
    15  * You should have received a copy of the GNU General Public License version
    16  * 2 along with this work; if not, write to the Free Software Foundation,
    17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    18  *
    19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
    20  * or visit www.oracle.com if you need additional information or have any
    21  * questions.
    22  *
    23  */
    24 #ifndef SHARE_VM_MEMORY_METASPACE_HPP
    25 #define SHARE_VM_MEMORY_METASPACE_HPP
    27 #include "memory/allocation.hpp"
    28 #include "memory/memRegion.hpp"
    29 #include "runtime/virtualspace.hpp"
    30 #include "utilities/exceptions.hpp"
    32 // Metaspace
    33 //
    34 // Metaspaces are Arenas for the VM's metadata.
    35 // They are allocated one per class loader object, and one for the null
    36 // bootstrap class loader.
    37 // Eventually the bootstrap loader's metaspace will have a read-only section
    38 // and a read-write section: written for DumpSharedSpaces and read for UseSharedSpaces.
    39 //
    40 //    block X ---+       +-------------------+
    41 //               |       |  Virtualspace     |
    42 //               |       |                   |
    43 //               |       |                   |
    44 //               |       |-------------------|
    45 //               |       || Chunk            |
    46 //               |       ||                  |
    47 //               |       ||----------        |
    48 //               +------>||| block 0 |       |
    49 //                       ||----------        |
    50 //                       ||| block 1 |       |
    51 //                       ||----------        |
    52 //                       ||                  |
    53 //                       |-------------------|
    54 //                       |                   |
    55 //                       |                   |
    56 //                       +-------------------+
    57 //
    59 class ChunkManager;
    60 class ClassLoaderData;
    61 class Metablock;
    62 class Metachunk;
    63 class MetaWord;
    64 class Mutex;
    65 class outputStream;
    66 class SpaceManager;
    67 class VirtualSpaceList;
    69 // Each Metaspace has a SpaceManager, and allocations
    70 // are done by the SpaceManager.  Allocations are done
    71 // out of the current Metachunk.  When the current Metachunk
    72 // is exhausted, the SpaceManager gets a new one from
    73 // the current VirtualSpace.  When the VirtualSpace is exhausted
    74 // the SpaceManager gets a new one.  The SpaceManager
    75 // also manages freelists of available Chunks.
    76 //
    77 // Currently the space manager maintains the list of
    78 // virtual spaces and the list of chunks in use.  Its
    79 // allocate() method returns a block for use as a
    80 // quantum of metadata.
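
The fallback chain described above (current Metachunk, then a fresh chunk, then a new or expanded VirtualSpace) can be modeled with a short standalone sketch. The types below are simplified stand-ins, not the SpaceManager and Metachunk declared for this header, and the single "make a new chunk" step glosses over the chunk freelists and chunk sizing the real code performs.

// Simplified, standalone model of the allocation fallback described above.
#include <cstddef>
#include <list>
#include <vector>

struct Chunk {                              // stand-in for Metachunk
  std::vector<char> storage;
  std::size_t top = 0;
  explicit Chunk(std::size_t bytes) : storage(bytes) {}

  void* allocate(std::size_t bytes) {       // bump-pointer allocation in a chunk
    if (top + bytes > storage.size()) return nullptr;   // chunk exhausted
    void* p = storage.data() + top;
    top += bytes;
    return p;
  }
};

struct SpaceManagerModel {                  // stand-in for SpaceManager
  std::list<Chunk> chunks_in_use;
  std::size_t chunk_bytes;
  explicit SpaceManagerModel(std::size_t cb) : chunk_bytes(cb) {}

  void* allocate(std::size_t bytes) {
    // 1. Try the current (most recently added) chunk first.
    if (!chunks_in_use.empty()) {
      if (void* p = chunks_in_use.back().allocate(bytes)) return p;
    }
    // 2. Current chunk exhausted: take a new chunk. In the VM this comes from
    //    a chunk freelist or the current VirtualSpace, which itself grows when
    //    exhausted; here a new chunk is simply created.
    chunks_in_use.emplace_back(chunk_bytes);
    return chunks_in_use.back().allocate(bytes);
  }
};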
    82 class Metaspace : public CHeapObj<mtClass> {
    83   friend class VMStructs;
    84   friend class SpaceManager;
    85   friend class VM_CollectForMetadataAllocation;
    86   friend class MetaspaceGC;
    87   friend class MetaspaceAux;
    89  public:
    90   enum MetadataType {
    91     ClassType,
    92     NonClassType,
    93     MetadataTypeCount
    94   };
    95   enum MetaspaceType {
    96     StandardMetaspaceType,
    97     BootMetaspaceType,
    98     ROMetaspaceType,
    99     ReadWriteMetaspaceType,
   100     AnonymousMetaspaceType,
   101     ReflectionMetaspaceType
   102   };
   104  private:
   105   void initialize(Mutex* lock, MetaspaceType type);
   107   // Get the first chunk for a Metaspace.  Used for
   108   // special cases such as the boot class loader, reflection
   109   // class loader and anonymous class loader.
   110   Metachunk* get_initialization_chunk(MetadataType mdtype,
   111                                       size_t chunk_word_size,
   112                                       size_t chunk_bunch);
   114   // Align up the word size to the allocation word size
   115   static size_t align_word_size_up(size_t);
   117   // Aligned size of the compressed class space.
   118   static size_t _compressed_class_space_size;
   120   static size_t compressed_class_space_size() {
   121     return _compressed_class_space_size;
   122   }
   123   static void set_compressed_class_space_size(size_t size) {
   124     _compressed_class_space_size = size;
   125   }
   127   static size_t _first_chunk_word_size;
   128   static size_t _first_class_chunk_word_size;
   130   static size_t _commit_alignment;
   131   static size_t _reserve_alignment;
   133   SpaceManager* _vsm;
   134   SpaceManager* vsm() const { return _vsm; }
   136   SpaceManager* _class_vsm;
   137   SpaceManager* class_vsm() const { return _class_vsm; }
   139   // Allocate space for metadata of type mdtype. This is space
   140   // within a Metachunk and is used by
   141   //   allocate(ClassLoaderData*, size_t, bool, MetaspaceObj::Type, TRAPS)
   142   MetaWord* allocate(size_t word_size, MetadataType mdtype);
   144   // Virtual Space lists for both classes and other metadata
   145   static VirtualSpaceList* _space_list;
   146   static VirtualSpaceList* _class_space_list;
   148   static ChunkManager* _chunk_manager_metadata;
   149   static ChunkManager* _chunk_manager_class;
   151  public:
   152   static VirtualSpaceList* space_list()       { return _space_list; }
   153   static VirtualSpaceList* class_space_list() { return _class_space_list; }
   154   static VirtualSpaceList* get_space_list(MetadataType mdtype) {
   155     assert(mdtype != MetadataTypeCount, "MetadataTypeCount can't be used as mdtype");
   156     return mdtype == ClassType ? class_space_list() : space_list();
   157   }
   159   static ChunkManager* chunk_manager_metadata() { return _chunk_manager_metadata; }
   160   static ChunkManager* chunk_manager_class()    { return _chunk_manager_class; }
   161   static ChunkManager* get_chunk_manager(MetadataType mdtype) {
   162     assert(mdtype != MetadataTypeCount, "MetadataTypeCount can't be used as mdtype");
   163     return mdtype == ClassType ? chunk_manager_class() : chunk_manager_metadata();
   164   }
   166  private:
   167   // This is used by DumpSharedSpaces only, where only _vsm is used. So we will
   168   // maintain a single list for now.
   169   void record_allocation(void* ptr, MetaspaceObj::Type type, size_t word_size);
   171 #ifdef _LP64
   172   static void set_narrow_klass_base_and_shift(address metaspace_base, address cds_base);
   174   // Returns true if CDS can be used with the metaspace allocated at the specified address.
   175   static bool can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base);
   177   static void allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base);
   179   static void initialize_class_space(ReservedSpace rs);
   180 #endif
   182   class AllocRecord : public CHeapObj<mtClass> {
   183   public:
   184     AllocRecord(address ptr, MetaspaceObj::Type type, int byte_size)
   185       : _next(NULL), _ptr(ptr), _type(type), _byte_size(byte_size) {}
   186     AllocRecord *_next;
   187     address _ptr;
   188     MetaspaceObj::Type _type;
   189     int _byte_size;
   190   };
   192   AllocRecord * _alloc_record_head;
   193   AllocRecord * _alloc_record_tail;
   195   size_t class_chunk_size(size_t word_size);
   197  public:
   199   Metaspace(Mutex* lock, MetaspaceType type);
   200   ~Metaspace();
   202   static void ergo_initialize();
   203   static void global_initialize();
   205   static size_t first_chunk_word_size() { return _first_chunk_word_size; }
   206   static size_t first_class_chunk_word_size() { return _first_class_chunk_word_size; }
   208   static size_t reserve_alignment()       { return _reserve_alignment; }
   209   static size_t reserve_alignment_words() { return _reserve_alignment / BytesPerWord; }
   210   static size_t commit_alignment()        { return _commit_alignment; }
   211   static size_t commit_alignment_words()  { return _commit_alignment / BytesPerWord; }
   213   char*  bottom() const;
   214   size_t used_words_slow(MetadataType mdtype) const;
   215   size_t free_words_slow(MetadataType mdtype) const;
   216   size_t capacity_words_slow(MetadataType mdtype) const;
   218   size_t used_bytes_slow(MetadataType mdtype) const;
   219   size_t capacity_bytes_slow(MetadataType mdtype) const;
   221   static MetaWord* allocate(ClassLoaderData* loader_data, size_t word_size,
   222                             bool read_only, MetaspaceObj::Type type, TRAPS);
   223   void deallocate(MetaWord* ptr, size_t byte_size, bool is_class);
   225   MetaWord* expand_and_allocate(size_t size,
   226                                 MetadataType mdtype);
   228   static bool contains(const void *ptr);
   229   void dump(outputStream* const out) const;
   231   // Free empty virtualspaces
   232   static void purge(MetadataType mdtype);
   233   static void purge();
   235   static void report_metadata_oome(ClassLoaderData* loader_data, size_t word_size,
   236                                    MetadataType mdtype, TRAPS);
   238   void print_on(outputStream* st) const;
   239   // Debugging support
   240   void verify();
   242   class AllocRecordClosure :  public StackObj {
   243   public:
   244     virtual void doit(address ptr, MetaspaceObj::Type type, int byte_size) = 0;
   245   };
   247   void iterate(AllocRecordClosure *closure);
   249   // Returns true only if UseCompressedClassPointers is true and DumpSharedSpaces is false.
   250   static bool using_class_space() {
   251     return NOT_LP64(false) LP64_ONLY(UseCompressedClassPointers && !DumpSharedSpaces);
   252   }
   254   static bool is_class_space_allocation(MetadataType mdType) {
   255     return mdType == ClassType && using_class_space();
   256   }
   258 };
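
The NOT_LP64 / LP64_ONLY pair used by using_class_space() above is a common HotSpot idiom: each macro expands to its argument on the matching word size and to nothing on the other, so exactly one of the two expressions survives preprocessing. A standalone sketch of the pattern (the macro definitions here are simplified stand-ins for the real ones in utilities/globalDefinitions.hpp):

// Illustration of the NOT_LP64 / LP64_ONLY expansion; not the VM's definitions.
#ifdef _LP64
  #define LP64_ONLY(code) code
  #define NOT_LP64(code)
#else
  #define LP64_ONLY(code)
  #define NOT_LP64(code) code
#endif

static bool using_class_space_example(bool use_compressed_class_pointers,
                                      bool dump_shared_spaces) {
  // Expands to `return false;` on 32-bit builds and to the 64-bit expression
  // on LP64 builds, mirroring Metaspace::using_class_space().
  return NOT_LP64(false) LP64_ONLY(use_compressed_class_pointers && !dump_shared_spaces);
}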
   260 class MetaspaceAux : AllStatic {
   261   static size_t free_chunks_total_words(Metaspace::MetadataType mdtype);
   263   // These methods iterate over the classloader data graph
   264   // for the given Metaspace type.  These are slow.
   265   static size_t used_bytes_slow(Metaspace::MetadataType mdtype);
   266   static size_t free_bytes_slow(Metaspace::MetadataType mdtype);
   267   static size_t capacity_bytes_slow(Metaspace::MetadataType mdtype);
   268   static size_t capacity_bytes_slow();
   270   // Running sum of space in all Metachunks that has been
   271   // allocated to a Metaspace.  This is used instead of
   272   // iterating over all the classloaders. One for each
   273   // type of Metadata
   274   static size_t _allocated_capacity_words[Metaspace::MetadataTypeCount];
   275   // Running sum of space in all Metachunks that
   276   // are being used for metadata. One for each
   277   // type of Metadata.
   278   static size_t _allocated_used_words[Metaspace::MetadataTypeCount];
   280  public:
   281   // Decrement and increment _allocated_capacity_words
   282   static void dec_capacity(Metaspace::MetadataType type, size_t words);
   283   static void inc_capacity(Metaspace::MetadataType type, size_t words);
   285   // Decrement and increment _allocated_used_words
   286   static void dec_used(Metaspace::MetadataType type, size_t words);
   287   static void inc_used(Metaspace::MetadataType type, size_t words);
   289   // Total of space allocated to metadata in all Metaspaces.
   290   // This sums the space used in each Metachunk by
   291   // iterating over the classloader data graph
   292   static size_t used_bytes_slow() {
   293     return used_bytes_slow(Metaspace::ClassType) +
   294            used_bytes_slow(Metaspace::NonClassType);
   295   }
   297   // Used by MetaspaceCounters
   298   static size_t free_chunks_total_words();
   299   static size_t free_chunks_total_bytes();
   300   static size_t free_chunks_total_bytes(Metaspace::MetadataType mdtype);
   302   static size_t allocated_capacity_words(Metaspace::MetadataType mdtype) {
   303     return _allocated_capacity_words[mdtype];
   304   }
   305   static size_t allocated_capacity_words() {
   306     return allocated_capacity_words(Metaspace::NonClassType) +
   307            allocated_capacity_words(Metaspace::ClassType);
   308   }
   309   static size_t allocated_capacity_bytes(Metaspace::MetadataType mdtype) {
   310     return allocated_capacity_words(mdtype) * BytesPerWord;
   311   }
   312   static size_t allocated_capacity_bytes() {
   313     return allocated_capacity_words() * BytesPerWord;
   314   }
   316   static size_t allocated_used_words(Metaspace::MetadataType mdtype) {
   317     return _allocated_used_words[mdtype];
   318   }
   319   static size_t allocated_used_words() {
   320     return allocated_used_words(Metaspace::NonClassType) +
   321            allocated_used_words(Metaspace::ClassType);
   322   }
   323   static size_t allocated_used_bytes(Metaspace::MetadataType mdtype) {
   324     return allocated_used_words(mdtype) * BytesPerWord;
   325   }
   326   static size_t allocated_used_bytes() {
   327     return allocated_used_words() * BytesPerWord;
   328   }
   330   static size_t free_bytes();
   331   static size_t free_bytes(Metaspace::MetadataType mdtype);
   333   static size_t reserved_bytes(Metaspace::MetadataType mdtype);
   334   static size_t reserved_bytes() {
   335     return reserved_bytes(Metaspace::ClassType) +
   336            reserved_bytes(Metaspace::NonClassType);
   337   }
   339   static size_t committed_bytes(Metaspace::MetadataType mdtype);
   340   static size_t committed_bytes() {
   341     return committed_bytes(Metaspace::ClassType) +
   342            committed_bytes(Metaspace::NonClassType);
   343   }
   345   static size_t min_chunk_size_words();
   346   static size_t min_chunk_size_bytes() {
   347     return min_chunk_size_words() * BytesPerWord;
   348   }
   350   // Print change in used metadata.
   351   static void print_metaspace_change(size_t prev_metadata_used);
   352   static void print_on(outputStream * out);
   353   static void print_on(outputStream * out, Metaspace::MetadataType mdtype);
   355   static void print_class_waste(outputStream* out);
   356   static void print_waste(outputStream* out);
   357   static void dump(outputStream* out);
   358   static void verify_free_chunks();
   359   // Checks that the values returned by allocated_capacity_bytes() and
   360   // capacity_bytes_slow() are the same.
   361   static void verify_capacity();
   362   static void verify_used();
   363   static void verify_metrics();
   364 };
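
The byte counters exposed by MetaspaceAux are easiest to keep straight as a nesting: used space lives inside the Metachunks counted as capacity, chunks are carved out of committed memory, and committed memory is part of the reserved address range. The ordering below is an assumption stated for orientation rather than something this header asserts; a minimal sketch:

// Illustrative check of the assumed ordering of the metaspace metrics.
// Parameter names mirror the MetaspaceAux getters; values would come from a VM.
#include <cassert>
#include <cstddef>

void check_metaspace_metrics(std::size_t used, std::size_t capacity,
                             std::size_t committed, std::size_t reserved) {
  assert(used <= capacity);       // used space lives inside allocated chunks
  assert(capacity <= committed);  // chunks are carved out of committed memory
  assert(committed <= reserved);  // committed memory lies within the reservation
}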
   366 // Metaspaces are deallocated when their class loaders are GC'ed.
   367 // This class implements a policy for inducing GC's to recover
   368 // Metaspaces.
   370 class MetaspaceGC : AllStatic {
   372   // The current high-water-mark for inducing a GC.
   373   // When committed memory of all metaspaces reaches this value,
   374   // a GC is induced and the value is increased. Size is in bytes.
   375   static volatile intptr_t _capacity_until_GC;
   377   // For a CMS collection, signal that a concurrent collection should
   378   // be started.
   379   static bool _should_concurrent_collect;
   381   static uint _shrink_factor;
   383   static size_t shrink_factor() { return _shrink_factor; }
   384   void set_shrink_factor(uint v) { _shrink_factor = v; }
   386  public:
   388   static void initialize() { _capacity_until_GC = MetaspaceSize; }
   390   static size_t capacity_until_GC();
   391   static size_t inc_capacity_until_GC(size_t v);
   392   static size_t dec_capacity_until_GC(size_t v);
   394   static bool should_concurrent_collect() { return _should_concurrent_collect; }
   395   static void set_should_concurrent_collect(bool v) {
   396     _should_concurrent_collect = v;
   397   }
   399   // The amount to increase the high-water-mark (_capacity_until_GC)
   400   static size_t delta_capacity_until_GC(size_t bytes);
   402   // Tells whether we can expand metaspace without hitting set limits.
   403   static bool can_expand(size_t words, bool is_class);
   405   // Returns amount that we can expand without hitting a GC,
   406   // measured in words.
   407   static size_t allowed_expansion();
   409   // Calculate the new high-water mark at which to induce
   410   // a GC.
   411   static void compute_new_size();
   412 };
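
The policy sketched by the comments above is a simple high-water mark: committed metaspace grows until it reaches _capacity_until_GC, a GC is induced, and compute_new_size() then raises (or, with the shrink factor, lowers) the threshold. A standalone model of that control loop follows; the names and the fixed growth step are simplifications, not the declarations above.

// Simplified model of the metaspace GC-induction policy; not VM code.
#include <cstddef>

struct MetaspaceGCModel {
  std::size_t capacity_until_gc;   // high-water mark, seeded from MetaspaceSize
  std::size_t committed = 0;       // committed metaspace bytes

  explicit MetaspaceGCModel(std::size_t initial_hwm)
      : capacity_until_gc(initial_hwm) {}

  // True if the expansion fits under the current high-water mark.
  bool can_expand(std::size_t bytes) const {
    return committed + bytes <= capacity_until_gc;
  }

  void expand(std::size_t bytes) {
    if (!can_expand(bytes)) {
      induce_gc();          // let a collection reclaim dead class loaders first
      compute_new_size();   // then move the threshold
    }
    committed += bytes;
  }

  void induce_gc() { /* placeholder: the VM triggers a metadata GC here */ }

  void compute_new_size() {
    // Fixed 50% growth for the sketch; the VM derives the new threshold from
    // its free-ratio settings after the collection.
    capacity_until_gc += capacity_until_gc / 2;
  }
};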
   414 #endif // SHARE_VM_MEMORY_METASPACE_HPP
