src/share/vm/gc_implementation/g1/g1MarkSweep.cpp

Tue, 14 Jun 2011 11:01:10 -0700

author
johnc
date
Tue, 14 Jun 2011 11:01:10 -0700
changeset 2969
6747fd0512e0
parent 2643
1216415d8e35
child 3175
4dfb2df418f2
permissions
-rw-r--r--

7004681: G1: Extend marking verification to Full GCs
Summary: Perform a heap verification after the first phase of G1's full GC using objects' mark words to determine liveness. The third parameter of the heap verification routines, which was used in G1 to determine which marking bitmap to use in liveness calculations, has been changed from a boolean to an enum with values defined for using the mark word, and the 'prev' and 'next' bitmaps.
Reviewed-by: tonyp, ysr

     1 /*
     2  * Copyright (c) 2001, 2011, Oracle and/or its affiliates. All rights reserved.
     3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4  *
     5  * This code is free software; you can redistribute it and/or modify it
     6  * under the terms of the GNU General Public License version 2 only, as
     7  * published by the Free Software Foundation.
     8  *
     9  * This code is distributed in the hope that it will be useful, but WITHOUT
    10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    12  * version 2 for more details (a copy is included in the LICENSE file that
    13  * accompanied this code).
    14  *
    15  * You should have received a copy of the GNU General Public License version
    16  * 2 along with this work; if not, write to the Free Software Foundation,
    17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    18  *
    19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
    20  * or visit www.oracle.com if you need additional information or have any
    21  * questions.
    22  *
    23  */
    25 #include "precompiled.hpp"
    26 #include "classfile/javaClasses.hpp"
    27 #include "classfile/symbolTable.hpp"
    28 #include "classfile/systemDictionary.hpp"
    29 #include "classfile/vmSymbols.hpp"
    30 #include "code/codeCache.hpp"
    31 #include "code/icBuffer.hpp"
    32 #include "gc_implementation/g1/g1MarkSweep.hpp"
    33 #include "memory/gcLocker.hpp"
    34 #include "memory/genCollectedHeap.hpp"
    35 #include "memory/modRefBarrierSet.hpp"
    36 #include "memory/referencePolicy.hpp"
    37 #include "memory/space.hpp"
    38 #include "oops/instanceRefKlass.hpp"
    39 #include "oops/oop.inline.hpp"
    40 #include "prims/jvmtiExport.hpp"
    41 #include "runtime/aprofiler.hpp"
    42 #include "runtime/biasedLocking.hpp"
    43 #include "runtime/fprofiler.hpp"
    44 #include "runtime/synchronizer.hpp"
    45 #include "runtime/thread.hpp"
    46 #include "runtime/vmThread.hpp"
    47 #include "utilities/copy.hpp"
    48 #include "utilities/events.hpp"
    50 class HeapRegion;
// Entry point for a full (stop-the-world) mark-sweep collection of the G1
// heap, including the permanent generation.  Drives the four classic
// mark-compact phases using the shared GenMarkSweep machinery.
//
//   rp                 - reference processor used for discovering/processing
//                        soft/weak/final/phantom references during marking
//   clear_all_softrefs - if true, all SoftReferences are cleared regardless
//                        of the current soft-reference policy
void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                      bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  SharedHeap* sh = SharedHeap::heap();
#ifdef ASSERT
  if (sh->collector_policy()->should_clear_all_soft_refs()) {
    assert(clear_all_softrefs, "Policy should have been checked earler");
  }
#endif
  // hook up weak ref data so it can be used during Mark-Sweep
  assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  GenMarkSweep::_ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  // When collecting the permanent generation methodOops may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();
  Threads::gc_prologue();

  // Increment the invocation count for the permanent generation, since it is
  // implicitly collected whenever we do a full mark sweep collection.
  sh->perm_gen()->stat_record()->invocations++;

  bool marked_for_unloading = false;

  allocate_stacks();

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
  BiasedLocking::preserve_marks();

  // Phase 1: recursively mark all live objects.
  mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);

  // Phase 2: compute the new addresses of the live objects.
  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  // Phase 3: adjust all pointers to refer to the new locations.
  mark_sweep_phase3();

  // Phase 4: slide the live objects to their new locations.
  mark_sweep_phase4();

  GenMarkSweep::restore_marks();
  BiasedLocking::restore_marks();
  GenMarkSweep::deallocate_stacks();

  // We must invalidate the perm-gen rs, so that it gets rebuilt.
  GenRemSet* rs = sh->rem_set();
  rs->invalidate(sh->perm_gen()->used_region(), true /*whole_heap*/);

  // "free at last gc" is calculated from these.
  // CHF: cheating for now!!!
  //  Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
  //  Universe::set_heap_used_at_last_gc(Universe::heap()->used());

  Threads::gc_epilogue();
  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

  // refs processing: clean slate
  GenMarkSweep::_ref_processor = NULL;
}
   118 void G1MarkSweep::allocate_stacks() {
   119   GenMarkSweep::_preserved_count_max = 0;
   120   GenMarkSweep::_preserved_marks = NULL;
   121   GenMarkSweep::_preserved_count = 0;
   122 }
// Phase 1 of the full GC: mark all objects reachable from the strong
// roots, process discovered references, unload dead classes and
// nmethods, and prune the string/symbol tables.  When VerifyDuringGC is
// set, the heap is verified afterwards using the objects' mark words to
// determine liveness (VerifyOption_G1UseMarkWord).
//
//   marked_for_unloading - out-flag owned by the caller (currently unused here)
//   clear_all_softrefs   - passed to the reference policy; if true all
//                          SoftReferences are treated as clearable
void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  EventMark m("1 mark object");
  TraceTime tm("phase 1", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace(" 1");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // activate StrongRootsScope
                           true,  // Collecting permanent generation.
                           SharedHeap::SO_SystemClasses,
                           &GenMarkSweep::follow_root_closure,
                           &GenMarkSweep::follow_code_root_closure,
                           &GenMarkSweep::follow_root_closure);

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  rp->setup_policy(clear_all_softrefs);
  rp->process_discovered_references(&GenMarkSweep::is_alive,
                                    &GenMarkSweep::keep_alive,
                                    &GenMarkSweep::follow_stack_closure,
                                    NULL);

  // Follow system dictionary roots and unload classes
  bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");

  // Follow code cache roots (has to be done after system dictionary,
  // assumes all live klasses are marked)
  CodeCache::do_unloading(&GenMarkSweep::is_alive,
                                   &GenMarkSweep::keep_alive,
                                   purged_class);
  GenMarkSweep::follow_stack();

  // Update subklass/sibling/implementor links of live klasses
  GenMarkSweep::follow_weak_klass_links();
  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");

  // Visit memoized MDO's and clear any unmarked weak refs
  GenMarkSweep::follow_mdo_weak_refs();
  assert(GenMarkSweep::_marking_stack.is_empty(), "just drained");

  // Visit interned string tables and delete unmarked oops
  StringTable::unlink(&GenMarkSweep::is_alive);
  // Clean up unreferenced symbols in symbol table.
  SymbolTable::unlink();

  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
    COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact);
    gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying ");
    Universe::heap()->prepare_for_verify();
    // Note: we can verify only the heap here. When an object is
    // marked, the previous value of the mark word (including
    // identity hash values, ages, etc) is preserved, and the mark
    // word is set to markOop::marked_value - effectively removing
    // any hash values from the mark word. These hash values are
    // used when verifying the dictionaries and so removing them
    // from the mark word can make verification of the dictionaries
    // fail. At the end of the GC, the original mark word values
    // (including hash values) are restored to the appropriate
    // objects.
    Universe::heap()->verify(/* allow dirty */ true,
                             /* silent      */ false,
                             /* option      */ VerifyOption_G1UseMarkWord);

    G1CollectedHeap* g1h = G1CollectedHeap::heap();
    gclog_or_tty->print_cr("]");
  }
}
// Region closure for phase 2: walks every heap region and computes the
// post-compaction address of each live object (via forwarding pointers),
// using a single CompactPoint threaded through all compactible regions.
// Humongous regions are handled specially: a live humongous object is
// "forwarded to itself" (it does not move), while a dead one has its
// region freed and then prepared for compaction like a normal region.
class G1PrepareCompactClosure: public HeapRegionClosure {
  G1CollectedHeap* _g1h;
  ModRefBarrierSet* _mrbs;            // card-table barrier set, cleared for unused tails
  CompactPoint _cp;                   // shared compaction cursor across regions
  HumongousRegionSet _humongous_proxy_set;

  // Free the (dead) humongous object starting at hr, then prepare the
  // now-reusable region for compaction and clear the card table over the
  // part of the region that will be unused afterwards.
  void free_humongous_region(HeapRegion* hr) {
    HeapWord* end = hr->end();
    size_t dummy_pre_used;
    FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");

    assert(hr->startsHumongous(),
           "Only the start of a humongous region should be freed.");
    _g1h->free_humongous_region(hr, &dummy_pre_used, &dummy_free_list,
                                &_humongous_proxy_set, false /* par */);
    hr->prepare_for_compaction(&_cp);
    // Also clear the part of the card table that will be unused after
    // compaction.
    _mrbs->clear(MemRegion(hr->compaction_top(), end));
    dummy_free_list.remove_all();
  }

public:
  // cs is the first compactible space; compaction destinations start there.
  G1PrepareCompactClosure(CompactibleSpace* cs)
  : _g1h(G1CollectedHeap::heap()),
    _mrbs(G1CollectedHeap::heap()->mr_bs()),
    _cp(NULL, cs, cs->initialize_threshold()),
    _humongous_proxy_set("G1MarkSweep Humongous Proxy Set") { }

  // Flush the humongous proxy set into the heap's region sets.
  void update_sets() {
    // We'll recalculate total used bytes and recreate the free list
    // at the end of the GC, so no point in updating those values here.
    _g1h->update_sets_after_freeing_regions(0, /* pre_used */
                                            NULL, /* free_list */
                                            &_humongous_proxy_set,
                                            false /* par */);
  }

  // Visit one region; always returns false so iteration continues.
  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          // Live humongous objects do not move: forward to self.
          obj->forward_to(obj);
        } else  {
          free_humongous_region(hr);
        }
      } else {
        assert(hr->continuesHumongous(), "Invalid humongous.");
      }
    } else {
      hr->prepare_for_compaction(&_cp);
      // Also clear the part of the card table that will be unused after
      // compaction.
      _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
    }
    return false;
  }
};
   262 // Finds the first HeapRegion.
   263 class FindFirstRegionClosure: public HeapRegionClosure {
   264   HeapRegion* _a_region;
   265 public:
   266   FindFirstRegionClosure() : _a_region(NULL) {}
   267   bool doHeapRegion(HeapRegion* r) {
   268     _a_region = r;
   269     return true;
   270   }
   271   HeapRegion* result() { return _a_region; }
   272 };
// Phase 2 of the full GC: with liveness established by phase 1, compute
// the new (post-compaction) address of every live object.  The regular
// heap regions are processed first via G1PrepareCompactClosure, and the
// permanent generation last (see the ordering comment below).
void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is imperative that we traverse perm_gen LAST. If dead space is
  // allowed a range of dead object may get overwritten by a dead int
  // array. If perm_gen is not traversed last a klassOop may get
  // overwritten. This is fine since it is dead, but if the class has dead
  // instances we have to skip them, and in order to find their size we
  // need the klassOop!
  //
  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  EventMark m("2 compute new addresses");
  TraceTime tm("phase 2", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("2");

  // Find the first region to seed the compaction point.  If it holds a
  // live humongous object (which will not move), start compacting into
  // the next compaction space instead.
  FindFirstRegionClosure cl;
  g1h->heap_region_iterate(&cl);
  HeapRegion *r = cl.result();
  CompactibleSpace* sp = r;
  if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
    sp = r->next_compaction_space();
  }

  G1PrepareCompactClosure blk(sp);
  g1h->heap_region_iterate(&blk);
  blk.update_sets();

  // Perm gen is compacted within itself (traversed last, see above).
  CompactPoint perm_cp(pg, NULL, NULL);
  pg->prepare_for_compaction(&perm_cp);
}
   311 class G1AdjustPointersClosure: public HeapRegionClosure {
   312  public:
   313   bool doHeapRegion(HeapRegion* r) {
   314     if (r->isHumongous()) {
   315       if (r->startsHumongous()) {
   316         // We must adjust the pointers on the single H object.
   317         oop obj = oop(r->bottom());
   318         debug_only(GenMarkSweep::track_interior_pointers(obj));
   319         // point all the oops to the new location
   320         obj->adjust_pointers();
   321         debug_only(GenMarkSweep::check_interior_pointers());
   322       }
   323     } else {
   324       // This really ought to be "as_CompactibleSpace"...
   325       r->adjust_pointers();
   326     }
   327     return false;
   328   }
   329 };
// Phase 3 of the full GC: adjust every pointer in strong roots, weak
// roots, preserved marks, heap regions, and the permanent generation so
// they all refer to the new object locations computed in phase 2.
void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  // Adjust the pointers to reflect the new locations
  EventMark m("3 adjust pointers");
  TraceTime tm("phase 3", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("3");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // activate StrongRootsScope
                           true,  // Collecting permanent generation.
                           SharedHeap::SO_AllClasses,
                           &GenMarkSweep::adjust_root_pointer_closure,
                           NULL,  // do not touch code cache here
                           &GenMarkSweep::adjust_pointer_closure);

  // Adjust the discovered-reference lists held by the reference processor.
  g1h->ref_processor()->weak_oops_do(&GenMarkSweep::adjust_root_pointer_closure);

  // Now adjust pointers in remaining weak roots.  (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  g1h->g1_process_weak_roots(&GenMarkSweep::adjust_root_pointer_closure,
                             &GenMarkSweep::adjust_pointer_closure);

  GenMarkSweep::adjust_marks();

  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
  pg->adjust_pointers();
}
   363 class G1SpaceCompactClosure: public HeapRegionClosure {
   364 public:
   365   G1SpaceCompactClosure() {}
   367   bool doHeapRegion(HeapRegion* hr) {
   368     if (hr->isHumongous()) {
   369       if (hr->startsHumongous()) {
   370         oop obj = oop(hr->bottom());
   371         if (obj->is_gc_marked()) {
   372           obj->init_mark();
   373         } else {
   374           assert(hr->is_empty(), "Should have been cleared in phase 2.");
   375         }
   376         hr->reset_during_compaction();
   377       }
   378     } else {
   379       hr->compact();
   380     }
   381     return false;
   382   }
   383 };
// Phase 4 of the full GC: all pointers now refer to post-compaction
// addresses, so move (compact) the objects themselves.  The permanent
// generation is compacted FIRST (see the ordering comment below).
void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // It is imperative that we traverse perm_gen first in phase4. All
  // classes must be allocated earlier than their instances, and traversing
  // perm_gen first makes sure that all klassOops have moved to their new
  // location before any instance does a dispatch through it's klass!

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (perm_gen first rather than last), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  EventMark m("4 compact heap");
  TraceTime tm("phase 4", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("4");

  pg->compact();

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);

}
   411 // Local Variables: ***
   412 // c-indentation-style: gnu ***
   413 // End: ***

mercurial