src/share/vm/memory/dump.cpp


author       never
date         Thu, 31 Mar 2011 14:00:41 -0700
changeset    2700:352622fd140a
parent       2661:b099aaf51bf8
child        2777:8ce625481709
permissions  -rw-r--r--

7032129: Native memory usage grow unexpectedly for vm/oom/*InternedString tests
Reviewed-by: kvn, kamg, jcoomes

     1 /*
     2  * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
     3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4  *
     5  * This code is free software; you can redistribute it and/or modify it
     6  * under the terms of the GNU General Public License version 2 only, as
     7  * published by the Free Software Foundation.
     8  *
     9  * This code is distributed in the hope that it will be useful, but WITHOUT
    10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    12  * version 2 for more details (a copy is included in the LICENSE file that
    13  * accompanied this code).
    14  *
    15  * You should have received a copy of the GNU General Public License version
    16  * 2 along with this work; if not, write to the Free Software Foundation,
    17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    18  *
    19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
    20  * or visit www.oracle.com if you need additional information or have any
    21  * questions.
    22  *
    23  */
    25 #include "precompiled.hpp"
    26 #include "classfile/javaClasses.hpp"
    27 #include "classfile/loaderConstraints.hpp"
    28 #include "classfile/symbolTable.hpp"
    29 #include "classfile/systemDictionary.hpp"
    30 #include "gc_implementation/shared/spaceDecorator.hpp"
    31 #include "memory/classify.hpp"
    32 #include "memory/filemap.hpp"
    33 #include "memory/oopFactory.hpp"
    34 #include "memory/resourceArea.hpp"
    35 #include "oops/methodDataOop.hpp"
    36 #include "oops/oop.inline.hpp"
    37 #include "runtime/javaCalls.hpp"
    38 #include "runtime/signature.hpp"
    39 #include "runtime/vmThread.hpp"
    40 #include "runtime/vm_operations.hpp"
    41 #include "utilities/copy.hpp"
    44 // Closure to set up the fingerprint field for all methods.
    46 class FingerprintMethodsClosure: public ObjectClosure {
    47 public:
    48   void do_object(oop obj) {
    49     if (obj->is_method()) {
    50       methodOop mobj = (methodOop)obj;
    51       ResourceMark rm;
    52       (new Fingerprinter(mobj))->fingerprint();
    53     }
    54   }
    55 };
    59 // Closure to set the hash value (String.hash field) in all of the
    60 // String objects in the heap.  Setting the hash value is not required.
    61 // However, setting the value in advance prevents the value from being
    62 // written later, increasing the likelihood that the shared page containing
    63 // the hash can be shared.
    64 //
    65 // NOTE THAT the algorithm in StringTable::hash_string() MUST MATCH the
    66 // algorithm in java.lang.String.hashCode().
    68 class StringHashCodeClosure: public OopClosure {
    69 private:
    70   Thread* THREAD;
    71   int hash_offset;
    72 public:
    73   StringHashCodeClosure(Thread* t) {
    74     THREAD = t;
    75     hash_offset = java_lang_String::hash_offset_in_bytes();
    76   }
    78   void do_oop(oop* p) {
    79     if (p != NULL) {
    80       oop obj = *p;
    81       if (obj->klass() == SystemDictionary::String_klass()) {
    83         int hash = java_lang_String::hash_string(obj);
    84         obj->int_field_put(hash_offset, hash);
    85       }
    86     }
    87   }
    88   void do_oop(narrowOop* p) { ShouldNotReachHere(); }
    89 };
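       // For reference, the recurrence both StringTable::hash_string() and
       // java.lang.String.hashCode() must agree on is the classic 31-based
       // polynomial over the string's characters.  A minimal standalone
       // sketch (the function name and the raw jchar buffer are illustrative
       // only, not HotSpot API):
       static int string_hash_sketch(const jchar* chars, int length) {
         int h = 0;
         for (int i = 0; i < length; i++) {
           h = 31 * h + chars[i];  // same step as java.lang.String.hashCode()
         }
         return h;
       }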
    92 // Remove data from objects which should not appear in the shared file
    93 // (as it pertains only to the current JVM).
    95 class RemoveUnshareableInfoClosure : public ObjectClosure {
    96 public:
    97   void do_object(oop obj) {
    98     // Zap data from the objects which pertains only to this JVM.  We
    99     // want that data recreated in new JVMs when the shared file is used.
   100     if (obj->is_method()) {
   101       ((methodOop)obj)->remove_unshareable_info();
   102     }
   103     else if (obj->is_klass()) {
   104       Klass::cast((klassOop)obj)->remove_unshareable_info();
   105     }
   107     // Don't save compiler related special oops (shouldn't be any yet).
   108     if (obj->is_methodData() || obj->is_compiledICHolder()) {
   109       ShouldNotReachHere();
   110     }
   111   }
   112 };
   115 static bool mark_object(oop obj) {
   116   if (obj != NULL &&
   117       !obj->is_shared() &&
   118       !obj->is_forwarded() &&
   119       !obj->is_gc_marked()) {
   120     obj->set_mark(markOopDesc::prototype()->set_marked());
   121     return true;
   122   }
   124   return false;
   125 }
   128 class MoveSymbols : public SymbolClosure {
   129 private:
   130   char* _start;
   131   char* _end;
   132   char* _top;
   133   int _count;
   135   bool in_shared_space(Symbol* sym) const {
   136     return (char*)sym >= _start && (char*)sym < _end;
   137   }
   139   Symbol* get_shared_copy(Symbol* sym) {
   140     return sym->refcount() > 0 ? NULL : (Symbol*)(_start - sym->refcount());
   141   }
   143   Symbol* make_shared_copy(Symbol* sym) {
   144     Symbol* new_sym = (Symbol*)_top;
   145     int size = sym->object_size();
   146     _top += size * HeapWordSize;
   147     if (_top <= _end) {
   148       Copy::disjoint_words((HeapWord*)sym, (HeapWord*)new_sym, size);
   149       // Encode a reference to the copy as a negative distance from _start.
   150       // When a symbol is being copied to a shared space
   151       // during CDS archive creation, the original symbol is marked
   152       // as relocated by putting a negative value into its _refcount field.
   153       // This value is also used to find where exactly the shared copy is
   154       // (see MoveSymbols::get_shared_copy), so that the other references
   155       // to this symbol could be changed to point to the shared copy.
   156       sym->_refcount = (int)(_start - (char*)new_sym);
   157       // Mark the symbol in the shared archive as immortal so it is read only
   158       // and not refcounted.
   159       new_sym->_refcount = -1;
   160       _count++;
   161     } else {
   162       report_out_of_shared_space(SharedMiscData);
   163     }
   164     return new_sym;
   165   }
   167 public:
   168   MoveSymbols(char* top, char* end) :
   169     _start(top), _end(end), _top(top), _count(0) { }
   171   char* get_top() const { return _top; }
   172   int count()     const { return _count; }
   174   void do_symbol(Symbol** p) {
   175     Symbol* sym = load_symbol(p);
   176     if (sym != NULL && !in_shared_space(sym)) {
   177       Symbol* new_sym = get_shared_copy(sym);
   178       if (new_sym == NULL) {
   179         // The symbol has not been relocated yet; copy it to _top address
   180         assert(sym->refcount() > 0, "should have positive reference count");
   181         new_sym = make_shared_copy(sym);
   182       }
   183       // Make the reference point to the shared copy of the symbol
   184       store_symbol(p, new_sym);
   185     }
   186   }
   187 };
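       // The relocation encoding used by make_shared_copy()/get_shared_copy()
       // above boils down to the following pair of helpers; this is only a
       // sketch of the arithmetic, and the helper names are illustrative, not
       // part of MoveSymbols:
       static int encode_shared_symbol_sketch(char* start, Symbol* shared_copy) {
         // The copy sits at or above 'start', so the distance is <= 0 and can
         // never be mistaken for a live (positive) reference count.
         return (int)(start - (char*)shared_copy);
       }
       static Symbol* decode_shared_symbol_sketch(char* start, int encoded_refcount) {
         // Undo the encoding: start - (start - copy) == copy.
         return (Symbol*)(start - encoded_refcount);
       }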
   190 // Closure:  mark objects closure.
   192 class MarkObjectsOopClosure : public OopClosure {
   193 public:
   194   void do_oop(oop* p)       { mark_object(*p); }
   195   void do_oop(narrowOop* p) { ShouldNotReachHere(); }
   196 };
   199 class MarkObjectsSkippingKlassesOopClosure : public OopClosure {
   200 public:
   201   void do_oop(oop* pobj) {
   202     oop obj = *pobj;
   203     if (obj != NULL &&
   204         !obj->is_klass()) {
   205       mark_object(obj);
   206     }
   207   }
   208   void do_oop(narrowOop* pobj) { ShouldNotReachHere(); }
   209 };
   212 static void mark_object_recursive_skipping_klasses(oop obj) {
   213   mark_object(obj);
   214   if (obj != NULL) {
   215     MarkObjectsSkippingKlassesOopClosure mark_all;
   216     obj->oop_iterate(&mark_all);
   217   }
   218 }
   221 // Closure:  mark common read-only objects
   223 class MarkCommonReadOnly : public ObjectClosure {
   224 private:
   225   MarkObjectsOopClosure mark_all;
   226 public:
   227   void do_object(oop obj) {
   229     // Mark all constMethod objects.
   231     if (obj->is_constMethod()) {
   232       mark_object(obj);
   233       mark_object(constMethodOop(obj)->stackmap_data());
   234       // Exception tables are needed by ci code during compilation.
   235       mark_object(constMethodOop(obj)->exception_table());
   236     }
   238     // Mark objects referenced by klass objects which are read-only.
   240     else if (obj->is_klass()) {
   241       Klass* k = Klass::cast((klassOop)obj);
   242       mark_object(k->secondary_supers());
   244       // The METHODS() OBJARRAYS CANNOT BE MADE READ-ONLY, even though
   245       // they are never modified. Otherwise, they would be pre-marked; the
   246       // GC marking phase would skip them and, by skipping them, fail
   247       // to mark the method objects referenced by the arrays.
   249       if (obj->blueprint()->oop_is_instanceKlass()) {
   250         instanceKlass* ik = instanceKlass::cast((klassOop)obj);
   251         mark_object(ik->method_ordering());
   252         mark_object(ik->local_interfaces());
   253         mark_object(ik->transitive_interfaces());
   254         mark_object(ik->fields());
   256         mark_object(ik->class_annotations());
   258         mark_object_recursive_skipping_klasses(ik->fields_annotations());
   259         mark_object_recursive_skipping_klasses(ik->methods_annotations());
   260         mark_object_recursive_skipping_klasses(ik->methods_parameter_annotations());
   261         mark_object_recursive_skipping_klasses(ik->methods_default_annotations());
   263         typeArrayOop inner_classes = ik->inner_classes();
   264         if (inner_classes != NULL) {
   265           mark_object(inner_classes);
   266         }
   267       }
   268     }
   269   }
   270 };
   273 // Closure:  find symbol references in Java Heap objects
   275 class CommonSymbolsClosure : public ObjectClosure {
   276 private:
   277   SymbolClosure* _closure;
   278 public:
   279   CommonSymbolsClosure(SymbolClosure* closure) : _closure(closure) { }
   281   void do_object(oop obj) {
   283     // Traverse symbols referenced by method objects.
   285     if (obj->is_method()) {
   286       methodOop m = methodOop(obj);
   287       constantPoolOop constants = m->constants();
   288       _closure->do_symbol(constants->symbol_at_addr(m->name_index()));
   289       _closure->do_symbol(constants->symbol_at_addr(m->signature_index()));
   290     }
   292     // Traverse symbols referenced by klass objects which are read-only.
   294     else if (obj->is_klass()) {
   295       Klass* k = Klass::cast((klassOop)obj);
   296       k->shared_symbols_iterate(_closure);
   298       if (obj->blueprint()->oop_is_instanceKlass()) {
   299         instanceKlass* ik = instanceKlass::cast((klassOop)obj);
   300         typeArrayOop inner_classes = ik->inner_classes();
   301         if (inner_classes != NULL) {
   302           constantPoolOop constants = ik->constants();
   303           int n = inner_classes->length();
   304           for (int i = 0; i < n; i += instanceKlass::inner_class_next_offset) {
   305             int ioff = i + instanceKlass::inner_class_inner_name_offset;
   306             int index = inner_classes->ushort_at(ioff);
   307             if (index != 0) {
   308               _closure->do_symbol(constants->symbol_at_addr(index));
   309             }
   310           }
   311         }
   312       }
   313     }
   315     // Traverse symbols referenced by other constantpool entries.
   317     else if (obj->is_constantPool()) {
   318       constantPoolOop(obj)->shared_symbols_iterate(_closure);
   319     }
   320   }
   321 };
   324 // Closure:  mark char arrays used by strings
   326 class MarkStringValues : public ObjectClosure {
   327 private:
   328   MarkObjectsOopClosure mark_all;
   329 public:
   330   void do_object(oop obj) {
   332     // Character arrays referenced by String objects are read-only.
   334     if (java_lang_String::is_instance(obj)) {
   335       mark_object(java_lang_String::value(obj));
   336     }
   337   }
   338 };
   341 #ifdef DEBUG
   342 // Closure:  Check for objects left in the heap which have not been moved.
   344 class CheckRemainingObjects : public ObjectClosure {
   345 private:
   346   int count;
   348 public:
   349   CheckRemainingObjects() {
   350     count = 0;
   351   }
   353   void do_object(oop obj) {
   354     if (!obj->is_shared() &&
   355         !obj->is_forwarded()) {
   356       ++count;
   357       if (Verbose) {
   358         tty->print("Unreferenced object: ");
   359         obj->print_on(tty);
   360       }
   361     }
   362   }
   364   void status() {
   365     tty->print_cr("%d objects no longer referenced, not shared.", count);
   366   }
   367 };
   368 #endif
   371 // Closure:  Mark remaining objects read-write, except Strings.
   373 class MarkReadWriteObjects : public ObjectClosure {
   374 private:
   375   MarkObjectsOopClosure mark_objects;
   376 public:
   377   void do_object(oop obj) {
   379       // The METHODS() OBJARRAYS CANNOT BE MADE READ-ONLY, even though
   380       // they are never modified. Otherwise, they would be pre-marked; the
   381       // GC marking phase would skip them and, by skipping them, fail
   382       // to mark the method objects referenced by the arrays.
   384     if (obj->is_klass()) {
   385       mark_object(obj);
   386       Klass* k = klassOop(obj)->klass_part();
   387       mark_object(k->java_mirror());
   388       if (obj->blueprint()->oop_is_instanceKlass()) {
   389         instanceKlass* ik = (instanceKlass*)k;
   390         mark_object(ik->methods());
   391         mark_object(ik->constants());
   392       }
   393       if (obj->blueprint()->oop_is_javaArray()) {
   394         arrayKlass* ak = (arrayKlass*)k;
   395         mark_object(ak->component_mirror());
   396       }
   397       return;
   398     }
   400     // Mark constantPool tags and the constantPoolCache.
   402     else if (obj->is_constantPool()) {
   403       constantPoolOop pool = constantPoolOop(obj);
   404       mark_object(pool->cache());
   405       pool->shared_tags_iterate(&mark_objects);
   406       return;
   407     }
   409     // Mark all method objects.
   411     if (obj->is_method()) {
   412       mark_object(obj);
   413     }
   414   }
   415 };
   418 // Closure:  Mark String objects read-write.
   420 class MarkStringObjects : public ObjectClosure {
   421 private:
   422   MarkObjectsOopClosure mark_objects;
   423 public:
   424   void do_object(oop obj) {
   426     // Mark String objects referenced by constant pool entries.
   428     if (obj->is_constantPool()) {
   429       constantPoolOop pool = constantPoolOop(obj);
   430       pool->shared_strings_iterate(&mark_objects);
   431       return;
   432     }
   433   }
   434 };
   437 // Move objects matching the specified type (i.e. lock_bits) to the specified
   438 // space.
   440 class MoveMarkedObjects : public ObjectClosure {
   441 private:
   442   OffsetTableContigSpace* _space;
   443   bool _read_only;
   445 public:
   446   MoveMarkedObjects(OffsetTableContigSpace* space, bool read_only) {
   447     _space = space;
   448     _read_only = read_only;
   449   }
   451   void do_object(oop obj) {
   452     if (obj->is_shared()) {
   453       return;
   454     }
   455     if (obj->is_gc_marked() && obj->forwardee() == NULL) {
   456       int s = obj->size();
   457       oop sh_obj = (oop)_space->allocate(s);
   458       if (sh_obj == NULL) {
   459         report_out_of_shared_space(_read_only ? SharedReadOnly : SharedReadWrite);
   460       }
   461       if (PrintSharedSpaces && Verbose && WizardMode) {
   462         tty->print_cr("\nMoveMarkedObjects: " PTR_FORMAT " -> " PTR_FORMAT " %s", obj, sh_obj,
   463                       (_read_only ? "ro" : "rw"));
   464       }
   465       Copy::aligned_disjoint_words((HeapWord*)obj, (HeapWord*)sh_obj, s);
   466       obj->forward_to(sh_obj);
   467       if (_read_only) {
   468         // Readonly objects: set hash value to self pointer and make gc_marked.
   469         sh_obj->forward_to(sh_obj);
   470       } else {
   471         sh_obj->init_mark();
   472       }
   473     }
   474   }
   475 };
   477 static void mark_and_move(oop obj, MoveMarkedObjects* move) {
   478   if (mark_object(obj)) move->do_object(obj);
   479 }
   481 enum order_policy {
   482   OP_favor_startup = 0,
   483   OP_balanced = 1,
   484   OP_favor_runtime = 2
   485 };
   487 static void mark_and_move_for_policy(order_policy policy, oop obj, MoveMarkedObjects* move) {
   488   if (SharedOptimizeColdStartPolicy >= policy) mark_and_move(obj, move);
   489 }
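       // For example, with -XX:SharedOptimizeColdStartPolicy=1 (OP_balanced)
       // the ordered passes below move objects tagged OP_favor_startup and
       // OP_balanced, while objects tagged OP_favor_runtime are left for the
       // later unordered heap iterations.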
   491 class MarkAndMoveOrderedReadOnly : public ObjectClosure {
   492 private:
   493   MoveMarkedObjects *_move_ro;
   495 public:
   496   MarkAndMoveOrderedReadOnly(MoveMarkedObjects *move_ro) : _move_ro(move_ro) {}
   498   void do_object(oop obj) {
   499     if (obj->is_klass() && obj->blueprint()->oop_is_instanceKlass()) {
   500       instanceKlass* ik = instanceKlass::cast((klassOop)obj);
   501       int i;
   503       if (ik->super() != NULL) {
   504         do_object(ik->super());
   505       }
   507       objArrayOop interfaces = ik->local_interfaces();
   508       mark_and_move_for_policy(OP_favor_startup, interfaces, _move_ro);
   509       for(i = 0; i < interfaces->length(); i++) {
   510         klassOop k = klassOop(interfaces->obj_at(i));
   511         do_object(k);
   512       }
   514       objArrayOop methods = ik->methods();
   515       for(i = 0; i < methods->length(); i++) {
   516         methodOop m = methodOop(methods->obj_at(i));
   517         mark_and_move_for_policy(OP_favor_startup, m->constMethod(), _move_ro);
   518         mark_and_move_for_policy(OP_favor_runtime, m->constMethod()->exception_table(), _move_ro);
   519         mark_and_move_for_policy(OP_favor_runtime, m->constMethod()->stackmap_data(), _move_ro);
   520       }
   522       mark_and_move_for_policy(OP_favor_startup, ik->transitive_interfaces(), _move_ro);
   523       mark_and_move_for_policy(OP_favor_startup, ik->fields(), _move_ro);
   525       mark_and_move_for_policy(OP_favor_runtime, ik->secondary_supers(),  _move_ro);
   526       mark_and_move_for_policy(OP_favor_runtime, ik->method_ordering(),   _move_ro);
   527       mark_and_move_for_policy(OP_favor_runtime, ik->class_annotations(), _move_ro);
   528       mark_and_move_for_policy(OP_favor_runtime, ik->fields_annotations(), _move_ro);
   529       mark_and_move_for_policy(OP_favor_runtime, ik->methods_annotations(), _move_ro);
   530       mark_and_move_for_policy(OP_favor_runtime, ik->methods_parameter_annotations(), _move_ro);
   531       mark_and_move_for_policy(OP_favor_runtime, ik->methods_default_annotations(), _move_ro);
   532       mark_and_move_for_policy(OP_favor_runtime, ik->inner_classes(), _move_ro);
   533       mark_and_move_for_policy(OP_favor_runtime, ik->secondary_supers(), _move_ro);
   534     }
   535   }
   536 };
   538 class MarkAndMoveOrderedReadWrite: public ObjectClosure {
   539 private:
   540   MoveMarkedObjects *_move_rw;
   542 public:
   543   MarkAndMoveOrderedReadWrite(MoveMarkedObjects *move_rw) : _move_rw(move_rw) {}
   545   void do_object(oop obj) {
   546     if (obj->is_klass() && obj->blueprint()->oop_is_instanceKlass()) {
   547       instanceKlass* ik = instanceKlass::cast((klassOop)obj);
   548       int i;
   550       mark_and_move_for_policy(OP_favor_startup, ik->as_klassOop(), _move_rw);
   552       if (ik->super() != NULL) {
   553         do_object(ik->super());
   554       }
   556       objArrayOop interfaces = ik->local_interfaces();
   557       for(i = 0; i < interfaces->length(); i++) {
   558         klassOop k = klassOop(interfaces->obj_at(i));
   559         mark_and_move_for_policy(OP_favor_startup, k, _move_rw);
   560         do_object(k);
   561       }
   563       objArrayOop methods = ik->methods();
   564       mark_and_move_for_policy(OP_favor_startup, methods, _move_rw);
   565       for(i = 0; i < methods->length(); i++) {
   566         methodOop m = methodOop(methods->obj_at(i));
   567         mark_and_move_for_policy(OP_favor_startup, m, _move_rw);
   568         mark_and_move_for_policy(OP_favor_startup, ik->constants(), _move_rw);          // idempotent
   569         mark_and_move_for_policy(OP_balanced, ik->constants()->cache(), _move_rw); // idempotent
   570         mark_and_move_for_policy(OP_balanced, ik->constants()->tags(), _move_rw);  // idempotent
   571       }
   573       mark_and_move_for_policy(OP_favor_startup, ik->as_klassOop()->klass(), _move_rw);
   574       mark_and_move_for_policy(OP_favor_startup, ik->constants()->klass(), _move_rw);
   576       // Although Java mirrors are marked in MarkReadWriteObjects,
   577       // apparently they were never moved into shared spaces since
   578       // MoveMarkedObjects skips marked instance oops.  This may
   579       // be a bug in the original implementation or simply the vestige
   580       // of an abandoned experiment.  Nevertheless we leave a hint
   581       // here in case this capability is ever correctly implemented.
   582       //
   583       // mark_and_move_for_policy(OP_favor_runtime, ik->java_mirror(), _move_rw);
   584     }
   585   }
   587 };
   589 // Adjust references in oops to refer to shared spaces.
   591 class ResolveForwardingClosure: public OopClosure {
   592 public:
   593   void do_oop(oop* p) {
   594     oop obj = *p;
   595     if (!obj->is_shared()) {
   596       if (obj != NULL) {
   597         oop f = obj->forwardee();
   598         guarantee(f->is_shared(), "Oop doesn't refer to shared space.");
   599         *p = f;
   600       }
   601     }
   602   }
   603   void do_oop(narrowOop* pobj) { ShouldNotReachHere(); }
   604 };
   607 // The methods array must be reordered by Symbol* address.
   608 // (See classFileParser.cpp where methods in a class are originally
   609 // sorted). The addresses of symbols have been changed as a result
   610 // of moving to the shared space.
   612 class SortMethodsClosure: public ObjectClosure {
   613 public:
   614   void do_object(oop obj) {
   615     if (obj->blueprint()->oop_is_instanceKlass()) {
   616       instanceKlass* ik = instanceKlass::cast((klassOop)obj);
   617       methodOopDesc::sort_methods(ik->methods(),
   618                                   ik->methods_annotations(),
   619                                   ik->methods_parameter_annotations(),
   620                                   ik->methods_default_annotations(),
   621                                   true /* idempotent, slow */);
   622     }
   623   }
   624 };
   626 // Itable indices are calculated based on methods array order
   627 // (see klassItable::compute_itable_index()).  Must reinitialize
   628 // after ALL methods of ALL classes have been reordered.
   629 // We assume that since checkconstraints is false, this method
   630 // cannot throw an exception.  An exception here would be
   631 // problematic since this is the VMThread, not a JavaThread.
   633 class ReinitializeItables: public ObjectClosure {
   634 private:
   635   Thread* _thread;
   637 public:
   638   ReinitializeItables(Thread* thread) : _thread(thread) {}
   640   void do_object(oop obj) {
   641     if (obj->blueprint()->oop_is_instanceKlass()) {
   642       instanceKlass* ik = instanceKlass::cast((klassOop)obj);
   643       ik->itable()->initialize_itable(false, _thread);
   644     }
   645   }
   646 };
   649 // Adjust references in oops to refer to shared spaces.
   651 class PatchOopsClosure: public ObjectClosure {
   652 private:
   653   Thread* _thread;
   654   ResolveForwardingClosure resolve;
   656 public:
   657   PatchOopsClosure(Thread* thread) : _thread(thread) {}
   659   void do_object(oop obj) {
   660     obj->oop_iterate_header(&resolve);
   661     obj->oop_iterate(&resolve);
   663     assert(obj->klass()->is_shared(), "Klass not pointing into shared space.");
   665     // If the object is a Java object or class which might (in the
   666     // future) contain a reference to a young gen object, add it to the
   667     // list.
   669     if (obj->is_klass() || obj->is_instance()) {
   670       if (obj->is_klass() ||
   671           obj->is_a(SystemDictionary::Class_klass()) ||
   672           obj->is_a(SystemDictionary::Throwable_klass())) {
   673         // Do nothing
   674       }
   675       else if (obj->is_a(SystemDictionary::String_klass())) {
   676         // immutable objects.
   677       } else {
   678         // someone added an object we hadn't accounted for.
   679         ShouldNotReachHere();
   680       }
   681     }
   682   }
   683 };
   686 // Empty the young and old generations.
   688 class ClearSpaceClosure : public SpaceClosure {
   689 public:
   690   void do_space(Space* s) {
   691     s->clear(SpaceDecorator::Mangle);
   692   }
   693 };
   696 // Closure for serializing initialization data out to a data area to be
   697 // written to the shared file.
   699 class WriteClosure : public SerializeOopClosure {
   700 private:
   701   oop* top;
   702   char* end;
   704   inline void check_space() {
   705     if ((char*)top + sizeof(oop) > end) {
   706       report_out_of_shared_space(SharedMiscData);
   707     }
   708   }
   711 public:
   712   WriteClosure(char* md_top, char* md_end) {
   713     top = (oop*)md_top;
   714     end = md_end;
   715   }
   717   char* get_top() { return (char*)top; }
   719   void do_oop(oop* p) {
   720     check_space();
   721     oop obj = *p;
   722     assert(obj->is_oop_or_null(), "invalid oop");
   723     assert(obj == NULL || obj->is_shared(),
   724            "Oop in shared space not pointing into shared space.");
   725     *top = obj;
   726     ++top;
   727   }
   729   void do_oop(narrowOop* pobj) { ShouldNotReachHere(); }
   731   void do_int(int* p) {
   732     check_space();
   733     *top = (oop)(intptr_t)*p;
   734     ++top;
   735   }
   737   void do_size_t(size_t* p) {
   738     check_space();
   739     *top = (oop)(intptr_t)*p;
   740     ++top;
   741   }
   743   void do_ptr(void** p) {
   744     check_space();
   745     *top = (oop)*p;
   746     ++top;
   747   }
   749   void do_ptr(HeapWord** p) { do_ptr((void **) p); }
   751   void do_tag(int tag) {
   752     check_space();
   753     *top = (oop)(intptr_t)tag;
   754     ++top;
   755   }
   757   void do_region(u_char* start, size_t size) {
   758     if ((char*)top + size > end) {
   759       report_out_of_shared_space(SharedMiscData);
   760     }
   761     assert((intptr_t)start % sizeof(oop) == 0, "bad alignment");
   762     assert(size % sizeof(oop) == 0, "bad size");
   763     do_tag((int)size);
   764     while (size > 0) {
   765       *top = *(oop*)start;
   766       ++top;
   767       start += sizeof(oop);
   768       size -= sizeof(oop);
   769     }
   770   }
   772   bool reading() const { return false; }
   773 };
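       // Usage note: CompactingPermGenGen::serialize_oops() streams a fixed
       // sequence of tags, ints, sizes and pointers through this closure, one
       // oop-sized slot per value, into the misc-data region; a matching
       // read-side SerializeOopClosure (one whose reading() returns true)
       // replays the same sequence in the same order when the archive is
       // mapped in at startup.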
   776 class ResolveConstantPoolsClosure : public ObjectClosure {
   777 private:
   778   TRAPS;
   779 public:
   780   ResolveConstantPoolsClosure(Thread *t) {
   781     __the_thread__ = t;
   782   }
   783   void do_object(oop obj) {
   784     if (obj->is_constantPool()) {
   785       constantPoolOop cpool = (constantPoolOop)obj;
   786       int unresolved = cpool->pre_resolve_shared_klasses(THREAD);
   787     }
   788   }
   789 };
   792 // Print a summary of the contents of the read/write spaces to help
   793 // identify objects which might be able to be made read-only.  At this
   794 // point, the objects have been written, and we can trash them as
   795 // needed.
   797 static void print_contents() {
   798   if (PrintSharedSpaces) {
   799     GenCollectedHeap* gch = GenCollectedHeap::heap();
   800     CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
   802     // High level summary of the read-only space:
   804     ClassifyObjectClosure coc;
   805     tty->cr(); tty->print_cr("ReadOnly space:");
   806     gen->ro_space()->object_iterate(&coc);
   807     coc.print();
   809     // High level summary of the read-write space:
   811     coc.reset();
   812     tty->cr(); tty->print_cr("ReadWrite space:");
   813     gen->rw_space()->object_iterate(&coc);
   814     coc.print();
   816     // Reset counters
   818     ClearAllocCountClosure cacc;
   819     gen->ro_space()->object_iterate(&cacc);
   820     gen->rw_space()->object_iterate(&cacc);
   821     coc.reset();
   823     // Lower level summary of the read-only space:
   825     gen->ro_space()->object_iterate(&coc);
   826     tty->cr(); tty->print_cr("ReadOnly space:");
   827     ClassifyInstanceKlassClosure cikc;
   828     gen->rw_space()->object_iterate(&cikc);
   829     cikc.print();
   831     // Reset counters
   833     gen->ro_space()->object_iterate(&cacc);
   834     gen->rw_space()->object_iterate(&cacc);
   835     coc.reset();
   837     // Lower level summary of the read-write space:
   839     gen->rw_space()->object_iterate(&coc);
   840     cikc.reset();
   841     tty->cr();  tty->print_cr("ReadWrite space:");
   842     gen->rw_space()->object_iterate(&cikc);
   843     cikc.print();
   844   }
   845 }
   848 // Patch C++ vtable pointer in klass oops.
   850 // Klass objects contain references to c++ vtables in the JVM library.
   851 // Fix them to point to our constructed vtables.  However, don't iterate
   852 // across the space while doing this, as that causes the vtables to be
   853 // patched, undoing our useful work.  Instead, iterate to make a list,
   854 // then use the list to do the fixing.
   855 //
   856 // Our constructed vtables:
   857 // Dump time:
   858 //  1. init_self_patching_vtbl_list: table of pointers to current virtual method addrs
   859 //  2. generate_vtable_methods: create jump table, appended to above vtbl_list
   860 //  3. PatchKlassVtables: for Klass list, patch the vtable entry to point to jump table
   861 //     rather than to current vtbl
   862 // Table layout: NOTE FIXED SIZE
   863 //   1. vtbl pointers
   864 //   2. #Klass X #virtual methods per Klass
   865 //   1 entry for each, in the order:
   866 //   Klass1:method1 entry, Klass1:method2 entry, ... Klass1:method<num_virtuals> entry
   867 //   Klass2:method1 entry, Klass2:method2 entry, ... Klass2:method<num_virtuals> entry
   868 //   ...
   869 //   Klass<vtbl_list_size>:method1 entry, Klass<vtbl_list_size>:method2 entry,
   870 //       ... Klass<vtbl_list_size>:method<num_virtuals> entry
   871 //  Sample entry: (Sparc):
   872 //   save(sp, -256, sp)
   873 //   ba,pt common_code
   874 //   mov XXX, %L0       %L0 gets: Klass index <<8 + method index (note: max method index 255)
   875 //
   876 // Restore time:
   877 //   1. initialize_oops: reserve space for table
   878 //   2. init_self_patching_vtbl_list: update pointers to NEW virtual method addrs in text
   879 //
   880 // Execution time:
   881 //   First virtual method call for any object of these Klass types:
   882 //   1. object->klass->klass_part
   883 //   2. vtable entry for that klass_part points to the jump table entries
   884 //   3. branches to common_code with %O0/klass_part, %L0: Klass index <<8 + method index
   885 //   4. common_code:
   886 //      Get address of new vtbl pointer for this Klass from updated table
   887 //      Update new vtbl pointer in the Klass: future virtual calls go direct
   888 //      Jump to method, using new vtbl pointer and method index
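       // Putting the geometry together: the dummy vtable entry for
       // (Klass i, virtual slot j) lives at
       //   (void**)new_vtable_start + i * num_virtuals + j
       // which is why find_matching_vtbl_ptr() further below hands out
       // new_vtable_start + i * num_virtuals as the replacement vtable
       // pointer for the i-th entry of vtbl_list.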
   890 class PatchKlassVtables: public ObjectClosure {
   891 private:
   892   GrowableArray<klassOop>* _klass_objects;
   894 public:
   895   PatchKlassVtables() {
   896     _klass_objects = new GrowableArray<klassOop>();
   897   }
   899   void do_object(oop obj) {
   900     if (obj->is_klass()) {
   901       _klass_objects->append(klassOop(obj));
   902     }
   903   }
   905   void patch(void** vtbl_list, void* new_vtable_start) {
   906     int n = _klass_objects->length();
   907     for (int i = 0; i < n; i++) {
   908       klassOop obj = (klassOop)_klass_objects->at(i);
   909       Klass* k = obj->klass_part();
   910       *(void**)k = CompactingPermGenGen::find_matching_vtbl_ptr(
   911                      vtbl_list, new_vtable_start, k);
   912     }
   913   }
   914 };
   916 // Walk through all symbols and patch their vtable pointers.
   917 // Note that symbols have vtable pointers only in non-product builds
   918 // (see allocation.hpp).
   920 #ifndef PRODUCT
   921 class PatchSymbolVtables: public SymbolClosure {
   922 private:
   923   void* _new_vtbl_ptr;
   925 public:
   926   PatchSymbolVtables(void** vtbl_list, void* new_vtable_start) {
   927     Symbol s;
   928     _new_vtbl_ptr = CompactingPermGenGen::find_matching_vtbl_ptr(
   929                       vtbl_list, new_vtable_start, &s);
   930   }
   932   void do_symbol(Symbol** p) {
   933     Symbol* sym = load_symbol(p);
   934     *(void**)sym = _new_vtbl_ptr;
   935   }
   936 };
   937 #endif
   940 // Populate the shared space.
   942 class VM_PopulateDumpSharedSpace: public VM_Operation {
   943 private:
   944   GrowableArray<oop> *_class_promote_order;
   945   OffsetTableContigSpace* _ro_space;
   946   OffsetTableContigSpace* _rw_space;
   947   VirtualSpace* _md_vs;
   948   VirtualSpace* _mc_vs;
   950 public:
   951   VM_PopulateDumpSharedSpace(GrowableArray<oop> *class_promote_order,
   952                              OffsetTableContigSpace* ro_space,
   953                              OffsetTableContigSpace* rw_space,
   954                              VirtualSpace* md_vs, VirtualSpace* mc_vs) {
   955     _class_promote_order = class_promote_order;
   956     _ro_space = ro_space;
   957     _rw_space = rw_space;
   958     _md_vs = md_vs;
   959     _mc_vs = mc_vs;
   960   }
   962   VMOp_Type type() const { return VMOp_PopulateDumpSharedSpace; }
   963   void doit() {
   964     Thread* THREAD = VMThread::vm_thread();
   965     NOT_PRODUCT(SystemDictionary::verify();)
   966     // The following guarantee is meant to ensure that no loader constraints
   967     // exist yet, since the constraints table is not shared.  This becomes
   968     // more important now that we don't re-initialize vtables/itables for
   969     // shared classes at runtime, where constraints were previously created.
   970     guarantee(SystemDictionary::constraints()->number_of_entries() == 0,
   971               "loader constraints are not saved");
   972     // Revisit and implement this if we prelink method handle call sites:
   973     guarantee(SystemDictionary::invoke_method_table() == NULL ||
   974               SystemDictionary::invoke_method_table()->number_of_entries() == 0,
   975               "invoke method table is not saved");
   976     GenCollectedHeap* gch = GenCollectedHeap::heap();
   978     // At this point, many classes have been loaded.
   980     // Update all the fingerprints in the shared methods.
   982     tty->print("Calculating fingerprints ... ");
   983     FingerprintMethodsClosure fpmc;
   984     gch->object_iterate(&fpmc);
   985     tty->print_cr("done. ");
   987     // Remove all references outside the heap.
   989     tty->print("Removing unshareable information ... ");
   990     RemoveUnshareableInfoClosure ruic;
   991     gch->object_iterate(&ruic);
   992     tty->print_cr("done. ");
   994     // Move the objects in three passes.
   996     MarkObjectsOopClosure mark_all;
   997     MarkCommonReadOnly mark_common_ro;
   998     MarkStringValues mark_string_values;
   999     MarkReadWriteObjects mark_rw;
  1000     MarkStringObjects mark_strings;
  1001     MoveMarkedObjects move_ro(_ro_space, true);
  1002     MoveMarkedObjects move_rw(_rw_space, false);
  1004     // The SharedOptimizeColdStart VM option governs the new layout
  1005     // algorithm for promoting classes into the shared archive.
  1006     // The general idea is to minimize cold start time by laying
  1007     // out the objects in the order they are accessed at startup time.
  1008     // By doing this we are trying to eliminate out-of-order accesses
  1009     // in the shared archive.  This benefits cold startup time by making
  1010     // disk reads as sequential as possible during class loading and
  1011     // bootstrapping activities.  There may also be a small secondary
  1012     // effect of better "packing" of more commonly used data on a smaller
  1013     // number of pages, although no direct benefit has been measured from
  1014     // this effect.
  1015     //
  1016     // At the class level of granularity, the promotion order is dictated
  1017     // by the classlist file whose generation is discussed elsewhere.
  1018     //
  1019     // At smaller granularity, optimal ordering was determined by an
  1020     // offline analysis of object access order in the shared archive.
  1021     // The dbx watchpoint facility, combined with SA post-processing,
  1022     // was used to observe common access patterns primarily during
  1023     // classloading.  This information was used to craft the promotion
  1024     // order seen in the following closures.
  1025     //
  1026     // The observed access order is mostly governed by what happens
  1027     // in SystemDictionary::load_shared_class().  NOTE WELL - care
  1028     // should be taken when making changes to this method, because it
  1029     // may invalidate assumptions made about access order!
  1030     //
  1031     // (Ideally, there would be a better way to manage changes to
  1032     //  the access order.  Unfortunately a generic in-VM solution for
  1033     //  dynamically observing access order and optimizing shared
  1034     //  archive layout is pretty difficult.  We go with the static
  1035     //  analysis because the code is fairly mature at this point
  1036     //  and we're betting that the access order won't change much.)
  1038     MarkAndMoveOrderedReadOnly  mark_and_move_ordered_ro(&move_ro);
  1039     MarkAndMoveOrderedReadWrite mark_and_move_ordered_rw(&move_rw);
  1041     // Set up the share data and shared code segments.
  1043     char* md_top = _md_vs->low();
  1044     char* md_end = _md_vs->high();
  1045     char* mc_top = _mc_vs->low();
  1046     char* mc_end = _mc_vs->high();
  1048     // Reserve space for the list of klassOops whose vtables are used
  1049     // for patching others as needed.
  1051     void** vtbl_list = (void**)md_top;
  1052     int vtbl_list_size = CompactingPermGenGen::vtbl_list_size;
  1053     Universe::init_self_patching_vtbl_list(vtbl_list, vtbl_list_size);
  1055     md_top += vtbl_list_size * sizeof(void*);
  1056     void* vtable = md_top;
  1058     // Reserve space for a new dummy vtable for klass objects in the
  1059     // heap.  Generate self-patching vtable entries.
  1061     CompactingPermGenGen::generate_vtable_methods(vtbl_list,
  1062                                                   &vtable,
  1063                                                   &md_top, md_end,
  1064                                                   &mc_top, mc_end);
  1066     // Reserve space for the total size and the number of stored symbols.
  1068     md_top += sizeof(intptr_t) * 2;
  1070     MoveSymbols move_symbols(md_top, md_end);
  1071     CommonSymbolsClosure traverse_common_symbols(&move_symbols);
  1073     // Phase 1a: remove symbols with _refcount == 0
  1075     SymbolTable::unlink();
  1077     // Phase 1b: move commonly used symbols referenced by oop fields.
  1079     tty->print("Moving common symbols to metadata section at " PTR_FORMAT " ... ",
  1080                move_symbols.get_top());
  1081     gch->object_iterate(&traverse_common_symbols);
  1082     tty->print_cr("done. ");
  1084     // Phase 1c: move known names and signatures.
  1086     tty->print("Moving vmSymbols to metadata section at " PTR_FORMAT " ... ",
  1087                move_symbols.get_top());
  1088     vmSymbols::symbols_do(&move_symbols);
  1089     tty->print_cr("done. ");
  1091     // Phase 1d: move the remaining symbols by scanning the whole SymbolTable.
  1093     void* extra_symbols = move_symbols.get_top();
  1094     tty->print("Moving the remaining symbols to metadata section at " PTR_FORMAT " ... ",
  1095                move_symbols.get_top());
  1096     SymbolTable::symbols_do(&move_symbols);
  1097     tty->print_cr("done. ");
  1099     // Record the total length of all symbols at the beginning of the block.
  1100     ((intptr_t*)md_top)[-2] = move_symbols.get_top() - md_top;
  1101     ((intptr_t*)md_top)[-1] = move_symbols.count();
  1102     tty->print_cr("Moved %d symbols, %d bytes.",
  1103                   move_symbols.count(), move_symbols.get_top() - md_top);
  1104     // Advance the pointer to the end of symbol store.
  1105     md_top = move_symbols.get_top();
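           // At this point the misc-data region is laid out as
           //   [ vtbl_list | dummy vtable entries | symbol block byte size |
           //     symbol count | copied Symbol bodies ]
           // with md_top now pointing just past the last copied Symbol.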
  1108     // Phase 2: move commonly used read-only objects to the read-only space.
  1110     if (SharedOptimizeColdStart) {
  1111       tty->print("Moving pre-ordered read-only objects to shared space at " PTR_FORMAT " ... ",
  1112                  _ro_space->top());
  1113       for (int i = 0; i < _class_promote_order->length(); i++) {
  1114         oop obj = _class_promote_order->at(i);
  1115         mark_and_move_ordered_ro.do_object(obj);
  1116       }
  1117       tty->print_cr("done. ");
  1118     }
  1120     tty->print("Moving read-only objects to shared space at " PTR_FORMAT " ... ",
  1121                _ro_space->top());
  1122     gch->object_iterate(&mark_common_ro);
  1123     gch->object_iterate(&move_ro);
  1124     tty->print_cr("done. ");
  1126     // Phase 3: move String character arrays to the read-only space.
  1128     tty->print("Moving string char arrays to shared space at " PTR_FORMAT " ... ",
  1129                _ro_space->top());
  1130     gch->object_iterate(&mark_string_values);
  1131     gch->object_iterate(&move_ro);
  1132     tty->print_cr("done. ");
  1134     // Phase 4: move read-write objects to the read-write space, except
  1135     // Strings.
  1137     if (SharedOptimizeColdStart) {
  1138       tty->print("Moving pre-ordered read-write objects to shared space at " PTR_FORMAT " ... ",
  1139                  _rw_space->top());
  1140       for (int i = 0; i < _class_promote_order->length(); i++) {
  1141         oop obj = _class_promote_order->at(i);
  1142         mark_and_move_ordered_rw.do_object(obj);
  1143       }
  1144       tty->print_cr("done. ");
  1145     }
  1146     tty->print("Moving read-write objects to shared space at " PTR_FORMAT " ... ",
  1147                _rw_space->top());
  1148     Universe::oops_do(&mark_all, true);
  1149     SystemDictionary::oops_do(&mark_all);
  1150     oop tmp = Universe::arithmetic_exception_instance();
  1151     mark_object(java_lang_Throwable::message(tmp));
  1152     gch->object_iterate(&mark_rw);
  1153     gch->object_iterate(&move_rw);
  1154     tty->print_cr("done. ");
  1156     // Phase 5: move String objects to the read-write space.
  1158     tty->print("Moving String objects to shared space at " PTR_FORMAT " ... ",
  1159                _rw_space->top());
  1160     StringTable::oops_do(&mark_all);
  1161     gch->object_iterate(&mark_strings);
  1162     gch->object_iterate(&move_rw);
  1163     tty->print_cr("done. ");
  1164     tty->print_cr("Read-write space ends at " PTR_FORMAT ", %d bytes.",
  1165                   _rw_space->top(), _rw_space->used());
  1167 #ifdef DEBUG
  1168     // Check: scan for objects which were not moved.
  1170     CheckRemainingObjects check_objects;
  1171     gch->object_iterate(&check_objects);
  1172     check_objects.status();
  1173 #endif
  1175     // Resolve forwarding in objects and saved C++ structures
  1176     tty->print("Updating references to shared objects ... ");
  1177     ResolveForwardingClosure resolve;
  1178     Universe::oops_do(&resolve);
  1179     SystemDictionary::oops_do(&resolve);
  1180     StringTable::oops_do(&resolve);
  1182     // Fix (forward) all of the references in these shared objects (which
  1183     // are required to point ONLY to objects in the shared spaces).
  1184     // Also, create a list of all objects which might later contain a
  1185     // reference to a younger generation object.
  1187     CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
  1188     PatchOopsClosure patch(THREAD);
  1189     gen->ro_space()->object_iterate(&patch);
  1190     gen->rw_space()->object_iterate(&patch);
  1192     // Previously method sorting was done concurrently with forwarding
  1193     // pointer resolution in the shared spaces.  This imposed an ordering
  1194     // restriction in that methods were required to be promoted/patched
  1195     // before their holder classes.  (Because constant pool pointers in
  1196     // methodKlasses are required to be resolved before their holder class
  1197     // is visited for sorting, otherwise methods are sorted by incorrect,
  1198     // pre-forwarding addresses.)
  1199     //
  1200     // Now, we reorder methods as a separate step after ALL forwarding
  1201     // pointer resolution, so that methods can be promoted in any order
  1202     // with respect to their holder classes.
  1204     SortMethodsClosure sort;
  1205     gen->ro_space()->object_iterate(&sort);
  1206     gen->rw_space()->object_iterate(&sort);
  1208     ReinitializeItables reinit_itables(THREAD);
  1209     gen->ro_space()->object_iterate(&reinit_itables);
  1210     gen->rw_space()->object_iterate(&reinit_itables);
  1211     tty->print_cr("done. ");
  1212     tty->cr();
  1214     // Reorder the system dictionary.  (Moving the symbols affects
  1215     // how the hash table indices are calculated.)
  1217     SystemDictionary::reorder_dictionary();
  1219     // Empty the non-shared heap (because most of the objects were
  1220     // copied out, and the remainder cannot be considered valid oops).
  1222     ClearSpaceClosure csc;
  1223     for (int i = 0; i < gch->n_gens(); ++i) {
  1224       gch->get_gen(i)->space_iterate(&csc);
  1225     }
  1226     csc.do_space(gen->the_space());
  1227     NOT_PRODUCT(SystemDictionary::verify();)
  1229     // Copy the String table, the symbol table, and the system
  1230     // dictionary to the shared space in usable form.  Copy the hashtable
  1231     // buckets first [read-write], then copy the linked lists of entries
  1232     // [read-only].
  1234     SymbolTable::reverse(extra_symbols);
  1235     NOT_PRODUCT(SymbolTable::verify());
  1236     SymbolTable::copy_buckets(&md_top, md_end);
  1238     StringTable::reverse();
  1239     NOT_PRODUCT(StringTable::verify());
  1240     StringTable::copy_buckets(&md_top, md_end);
  1242     SystemDictionary::reverse();
  1243     SystemDictionary::copy_buckets(&md_top, md_end);
  1245     ClassLoader::verify();
  1246     ClassLoader::copy_package_info_buckets(&md_top, md_end);
  1247     ClassLoader::verify();
  1249     SymbolTable::copy_table(&md_top, md_end);
  1250     StringTable::copy_table(&md_top, md_end);
  1251     SystemDictionary::copy_table(&md_top, md_end);
  1252     ClassLoader::verify();
  1253     ClassLoader::copy_package_info_table(&md_top, md_end);
  1254     ClassLoader::verify();
  1256     // Print debug data.
  1258     if (PrintSharedSpaces) {
  1259       const char* fmt = "%s space: " PTR_FORMAT " out of " PTR_FORMAT " bytes allocated at " PTR_FORMAT ".";
  1260       tty->print_cr(fmt, "ro", _ro_space->used(), _ro_space->capacity(),
  1261                     _ro_space->bottom());
  1262       tty->print_cr(fmt, "rw", _rw_space->used(), _rw_space->capacity(),
  1263                     _rw_space->bottom());
  1264     }
  1266     // Write the oop data to the output array.
  1268     WriteClosure wc(md_top, md_end);
  1269     CompactingPermGenGen::serialize_oops(&wc);
  1270     md_top = wc.get_top();
  1272     // Update the vtable pointers in all of the Klass objects in the
  1273     // heap. They should point to the newly generated vtable.
  1275     PatchKlassVtables pkvt;
  1276     _rw_space->object_iterate(&pkvt);
  1277     pkvt.patch(vtbl_list, vtable);
  1279 #ifndef PRODUCT
  1280     // Update the vtable pointers in all symbols,
  1281     // but only in non-product builds where symbols DO have virtual methods.
  1282     PatchSymbolVtables psvt(vtbl_list, vtable);
  1283     SymbolTable::symbols_do(&psvt);
  1284 #endif
  1286     char* saved_vtbl = (char*)malloc(vtbl_list_size * sizeof(void*));
  1287     memmove(saved_vtbl, vtbl_list, vtbl_list_size * sizeof(void*));
  1288     memset(vtbl_list, 0, vtbl_list_size * sizeof(void*));
  1290     // Create and write the archive file that maps the shared spaces.
  1292     FileMapInfo* mapinfo = new FileMapInfo();
  1293     mapinfo->populate_header(gch->gen_policy()->max_alignment());
  1295     // Pass 1 - update file offsets in header.
  1296     mapinfo->write_header();
  1297     mapinfo->write_space(CompactingPermGenGen::ro, _ro_space, true);
  1298     _ro_space->set_saved_mark();
  1299     mapinfo->write_space(CompactingPermGenGen::rw, _rw_space, false);
  1300     _rw_space->set_saved_mark();
  1301     mapinfo->write_region(CompactingPermGenGen::md, _md_vs->low(),
  1302                           pointer_delta(md_top, _md_vs->low(), sizeof(char)),
  1303                           SharedMiscDataSize,
  1304                           false, false);
  1305     mapinfo->write_region(CompactingPermGenGen::mc, _mc_vs->low(),
  1306                           pointer_delta(mc_top, _mc_vs->low(), sizeof(char)),
  1307                           SharedMiscCodeSize,
  1308                           true, true);
  1310     // Pass 2 - write data.
  1311     mapinfo->open_for_write();
  1312     mapinfo->write_header();
  1313     mapinfo->write_space(CompactingPermGenGen::ro, _ro_space, true);
  1314     mapinfo->write_space(CompactingPermGenGen::rw, _rw_space, false);
  1315     mapinfo->write_region(CompactingPermGenGen::md, _md_vs->low(),
  1316                           pointer_delta(md_top, _md_vs->low(), sizeof(char)),
  1317                           SharedMiscDataSize,
  1318                           false, false);
  1319     mapinfo->write_region(CompactingPermGenGen::mc, _mc_vs->low(),
  1320                           pointer_delta(mc_top, _mc_vs->low(), sizeof(char)),
  1321                           SharedMiscCodeSize,
  1322                           true, true);
  1323     mapinfo->close();
  1325     // Summarize heap.
  1326     memmove(vtbl_list, saved_vtbl, vtbl_list_size * sizeof(void*));
  1327     print_contents();
  1328   }
  1329 }; // class VM_PopulateDumpSharedSpace
  1332 // Populate the shared spaces and dump to a file.
  1334 jint CompactingPermGenGen::dump_shared(GrowableArray<oop>* class_promote_order, TRAPS) {
  1335   GenCollectedHeap* gch = GenCollectedHeap::heap();
  1337   // Calculate hash values for all of the (interned) strings to avoid
  1338   // writes to shared pages in the future.
  1340   tty->print("Calculating hash values for String objects .. ");
  1341   StringHashCodeClosure shcc(THREAD);
  1342   StringTable::oops_do(&shcc);
  1343   tty->print_cr("done. ");
  1345   CompactingPermGenGen* gen = (CompactingPermGenGen*)gch->perm_gen();
  1346   VM_PopulateDumpSharedSpace op(class_promote_order,
  1347                                 gen->ro_space(), gen->rw_space(),
  1348                                 gen->md_space(), gen->mc_space());
  1349   VMThread::execute(&op);
  1350   return JNI_OK;
  1351 }
  1353 void* CompactingPermGenGen::find_matching_vtbl_ptr(void** vtbl_list,
  1354                                                    void* new_vtable_start,
  1355                                                    void* obj) {
  1356   void* old_vtbl_ptr = *(void**)obj;
  1357   for (int i = 0; i < vtbl_list_size; i++) {
  1358     if (vtbl_list[i] == old_vtbl_ptr) {
  1359       return (void**)new_vtable_start + i * num_virtuals;
  1360     }
  1361   }
  1362   ShouldNotReachHere();
  1363   return NULL;
  1364 }
  1367 class LinkClassesClosure : public ObjectClosure {
  1368  private:
  1369   Thread* THREAD;
  1371  public:
  1372   LinkClassesClosure(Thread* thread) : THREAD(thread) {}
  1374   void do_object(oop obj) {
  1375     if (obj->is_klass()) {
  1376       Klass* k = Klass::cast((klassOop) obj);
  1377       if (k->oop_is_instance()) {
  1378         instanceKlass* ik = (instanceKlass*) k;
  1379         // Link the class to cause the bytecodes to be rewritten and the
  1380         // cpcache to be created.
  1381         if (ik->get_init_state() < instanceKlass::linked) {
  1382           ik->link_class(THREAD);
  1383           guarantee(!HAS_PENDING_EXCEPTION, "exception in class rewriting");
  1384         }
  1386         // Create String objects from string initializer symbols.
  1387         ik->constants()->resolve_string_constants(THREAD);
  1388         guarantee(!HAS_PENDING_EXCEPTION, "exception resolving string constants");
  1389       }
  1390     }
  1391   }
  1392 };
  1395 // Support for a simple checksum of the contents of the class list
  1396 // file to prevent trivial tampering. The algorithm matches that in
  1397 // the MakeClassList program used by the J2SE build process.
  1398 #define JSUM_SEED ((jlong)CONST64(0xcafebabebabecafe))
  1399 static jlong
  1400 jsum(jlong start, const char *buf, const int len)
  1401 {
  1402     jlong h = start;
  1403     char *p = (char *)buf, *e = p + len;
  1404     while (p < e) {
  1405         char c = *p++;
  1406         if (c <= ' ') {
  1407             /* Skip spaces and control characters */
  1408             continue;
  1409         }
  1410         h = 31 * h + c;
  1411     }
  1412     return h;
  1413 }
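       // Worked example of the checksum handshake (a sketch only; the helper
       // name and buffer handling are illustrative): the 64-bit sum is carried
       // in the final "# ..." line of the classlist as two 32-bit hex halves,
       // which is exactly what the sscanf in preload_and_dump() below parses.
       static bool classlist_checksum_matches_sketch(jlong computed, const char* last_line) {
         jint high, low;
         if (sscanf(last_line, "# %8x%8x", &high, &low) != 2) {
           return false;
         }
         jlong file_sum = ((jlong)high << 32) | (low & 0xffffffff);
         return computed == file_sum;
       }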
  1419 // Preload classes from a list, populate the shared spaces and dump to a
  1420 // file.
  1422 void GenCollectedHeap::preload_and_dump(TRAPS) {
  1423   TraceTime timer("Dump Shared Spaces", TraceStartupTime);
  1424   ResourceMark rm;
  1426   // Preload classes to be shared.
  1427   // Should use some os:: method rather than fopen() here. aB.
  1428   // Construct the path to the class list (in jre/lib)
  1429   // Walk up two directories from the location of the VM and
  1430   // optionally tack on "lib" (depending on platform)
  1431   char class_list_path[JVM_MAXPATHLEN];
  1432   os::jvm_path(class_list_path, sizeof(class_list_path));
  1433   for (int i = 0; i < 3; i++) {
  1434     char *end = strrchr(class_list_path, *os::file_separator());
  1435     if (end != NULL) *end = '\0';
  1436   }
  1437   int class_list_path_len = (int)strlen(class_list_path);
  1438   if (class_list_path_len >= 3) {
  1439     if (strcmp(class_list_path + class_list_path_len - 3, "lib") != 0) {
  1440       strcat(class_list_path, os::file_separator());
  1441       strcat(class_list_path, "lib");
  1442     }
  1443   }
  1444   strcat(class_list_path, os::file_separator());
  1445   strcat(class_list_path, "classlist");
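         // For example, on Linux os::jvm_path() returns something like
         // .../jre/lib/<arch>/server/libjvm.so; stripping three path
         // components leaves .../jre/lib, which already ends in "lib", so
         // only "classlist" is appended.  On platforms where the VM does not
         // live under lib/ the extra "lib" component is tacked on first.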
  1447   FILE* file = fopen(class_list_path, "r");
  1448   if (file != NULL) {
  1449     jlong computed_jsum  = JSUM_SEED;
  1450     jlong file_jsum      = 0;
  1452     char class_name[256];
  1453     int class_count = 0;
  1454     GenCollectedHeap* gch = GenCollectedHeap::heap();
  1455     gch->_preloading_shared_classes = true;
  1456     GrowableArray<oop>* class_promote_order = new GrowableArray<oop>();
  1458     // Preload (and intern) strings which will be used later.
  1460     StringTable::intern("main", THREAD);
  1461     StringTable::intern("([Ljava/lang/String;)V", THREAD);
  1462     StringTable::intern("Ljava/lang/Class;", THREAD);
  1464     StringTable::intern("I", THREAD);   // Needed for StringBuffer persistence?
  1465     StringTable::intern("Z", THREAD);   // Needed for StringBuffer persistence?
  1467     // sun.io.Converters
  1468     static const char obj_array_sig[] = "[[Ljava/lang/Object;";
  1469     SymbolTable::lookup(obj_array_sig, (int)strlen(obj_array_sig), THREAD);
  1471     // java.util.HashMap
  1472     static const char map_entry_array_sig[] = "[Ljava/util/Map$Entry;";
  1473     SymbolTable::lookup(map_entry_array_sig, (int)strlen(map_entry_array_sig),
  1474                         THREAD);
  1476     tty->print("Loading classes to share ... ");
  1477     while ((fgets(class_name, sizeof class_name, file)) != NULL) {
  1478       if (*class_name == '#') {
  1479         jint fsh, fsl;
  1480         if (sscanf(class_name, "# %8x%8x\n", &fsh, &fsl) == 2) {
  1481           file_jsum = ((jlong)(fsh) << 32) | (fsl & 0xffffffff);
  1482         }
  1484         continue;
  1485       }
  1486       // Remove trailing newline
  1487       size_t name_len = strlen(class_name);
  1488       class_name[name_len-1] = '\0';
  1490       computed_jsum = jsum(computed_jsum, class_name, (const int)name_len - 1);
  1492       // Got a class name - load it.
  1493       TempNewSymbol class_name_symbol = SymbolTable::new_symbol(class_name, THREAD);
  1494       guarantee(!HAS_PENDING_EXCEPTION, "Exception creating a symbol.");
  1495       klassOop klass = SystemDictionary::resolve_or_null(class_name_symbol,
  1496                                                          THREAD);
  1497       guarantee(!HAS_PENDING_EXCEPTION, "Exception resolving a class.");
  1498       if (klass != NULL) {
  1499         if (PrintSharedSpaces) {
  1500           tty->print_cr("Shared spaces preloaded: %s", class_name);
  1501         }
  1504         instanceKlass* ik = instanceKlass::cast(klass);
  1506         // Should be class load order as per -XX:+TraceClassLoadingPreorder
  1507         class_promote_order->append(ik->as_klassOop());
  1509         // Link the class to cause the bytecodes to be rewritten and the
  1510         // cpcache to be created. The linking is done as soon as classes
  1511         // are loaded in order that the related data structures (klass,
  1512         // cpCache, String constants) are located together.
  1514         if (ik->get_init_state() < instanceKlass::linked) {
  1515           ik->link_class(THREAD);
  1516           guarantee(!(HAS_PENDING_EXCEPTION), "exception in class rewriting");
  1517         }
  1519         // Create String objects from string initializer symbols.
  1521         ik->constants()->resolve_string_constants(THREAD);
  1523         class_count++;
  1524       } else {
  1525         if (PrintSharedSpaces) {
  1526           tty->cr();
  1527           tty->print_cr(" Preload failed: %s", class_name);
  1528         }
  1529       }
  1530       file_jsum = 0; // Checksum must be on last line of file
  1531     }
  1532     if (computed_jsum != file_jsum) {
  1533       tty->cr();
  1534       tty->print_cr("Preload failed: checksum of class list was incorrect.");
  1535       exit(1);
  1536     }
  1538     tty->print_cr("done. ");
  1540     if (PrintSharedSpaces) {
  1541       tty->print_cr("Shared spaces: preloaded %d classes", class_count);
  1542     }
  1544     // Rewrite and unlink classes.
  1545     tty->print("Rewriting and unlinking classes ... ");
  1546     // Make heap parsable
  1547     ensure_parsability(false); // arg is actually don't care
  1549     // Link any classes which got missed.  (It's not quite clear why
  1550     // they got missed.)  This iteration would be unsafe if we weren't
  1551     // single-threaded at this point; however we can't do it on the VM
  1552     // thread because it requires object allocation.
  1553     LinkClassesClosure lcc(Thread::current());
  1554     object_iterate(&lcc);
  1555     ensure_parsability(false); // arg is actually don't care
  1556     tty->print_cr("done. ");
  1558     // Create and dump the shared spaces.
  1559     jint err = CompactingPermGenGen::dump_shared(class_promote_order, THREAD);
  1560     if (err != JNI_OK) {
  1561       fatal("Dumping shared spaces failed.");
  1562     }
  1564   } else {
  1565     char errmsg[JVM_MAXPATHLEN];
  1566     os::lasterror(errmsg, JVM_MAXPATHLEN);
  1567     tty->print_cr("Loading classlist failed: %s", errmsg);
  1568     exit(1);
  1569   }
  1571   // Since various initialization steps have been undone by this process,
  1572   // it is not reasonable to continue running a java process.
  1573   exit(0);
  1574 }
