src/share/vm/gc_implementation/g1/g1MarkSweep.cpp

Tue, 21 Aug 2012 14:10:39 -0700

author
johnc
date
Tue, 21 Aug 2012 14:10:39 -0700
changeset 3998
7383557659bd
parent 3957
a2f7274eb6ef
child 4037
da91efe96a93
permissions
-rw-r--r--

7185699: G1: Prediction model discrepancies
Summary: Correct the result value of G1CollectedHeap::pending_card_num(). Change the code that calculates the GC efficiency of a non-young heap region to use historical data from mixed GCs and the actual number of live bytes when predicting how long it would take to collect the region. Changes were also reviewed by Thomas Schatzl.
Reviewed-by: azeemj, brutisso

     1 /*
     2  * Copyright (c) 2001, 2012, Oracle and/or its affiliates. All rights reserved.
     3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4  *
     5  * This code is free software; you can redistribute it and/or modify it
     6  * under the terms of the GNU General Public License version 2 only, as
     7  * published by the Free Software Foundation.
     8  *
     9  * This code is distributed in the hope that it will be useful, but WITHOUT
    10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    12  * version 2 for more details (a copy is included in the LICENSE file that
    13  * accompanied this code).
    14  *
    15  * You should have received a copy of the GNU General Public License version
    16  * 2 along with this work; if not, write to the Free Software Foundation,
    17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    18  *
    19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
    20  * or visit www.oracle.com if you need additional information or have any
    21  * questions.
    22  *
    23  */
    25 #include "precompiled.hpp"
    26 #include "classfile/javaClasses.hpp"
    27 #include "classfile/symbolTable.hpp"
    28 #include "classfile/systemDictionary.hpp"
    29 #include "classfile/vmSymbols.hpp"
    30 #include "code/codeCache.hpp"
    31 #include "code/icBuffer.hpp"
    32 #include "gc_implementation/g1/g1Log.hpp"
    33 #include "gc_implementation/g1/g1MarkSweep.hpp"
    34 #include "memory/gcLocker.hpp"
    35 #include "memory/genCollectedHeap.hpp"
    36 #include "memory/modRefBarrierSet.hpp"
    37 #include "memory/referencePolicy.hpp"
    38 #include "memory/space.hpp"
    39 #include "oops/instanceRefKlass.hpp"
    40 #include "oops/oop.inline.hpp"
    41 #include "prims/jvmtiExport.hpp"
    42 #include "runtime/aprofiler.hpp"
    43 #include "runtime/biasedLocking.hpp"
    44 #include "runtime/fprofiler.hpp"
    45 #include "runtime/synchronizer.hpp"
    46 #include "runtime/thread.hpp"
    47 #include "runtime/vmThread.hpp"
    48 #include "utilities/copy.hpp"
    49 #include "utilities/events.hpp"
    51 class HeapRegion;
// Entry point for a full, single-threaded mark-sweep-compact collection of
// the G1 heap. Must be called inside a safepoint. Drives the four classic
// mark-compact phases (mark, compute-new-addresses, adjust-pointers,
// compact) plus the surrounding prologue/epilogue bookkeeping.
//
//   rp                 - the STW reference processor to use for this cycle;
//                        must be the heap's ref_processor_stw()
//   clear_all_softrefs - if true, soft references are eligible for clearing
//                        during reference processing
void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                      bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  SharedHeap* sh = SharedHeap::heap();
#ifdef ASSERT
  // The policy's decision to clear soft refs should already have been
  // folded into clear_all_softrefs by the caller.
  if (sh->collector_policy()->should_clear_all_soft_refs()) {
    assert(clear_all_softrefs, "Policy should have been checked earler");
  }
#endif
  // hook up weak ref data so it can be used during Mark-Sweep
  assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Precondition");

  GenMarkSweep::_ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  // When collecting the permanent generation methodOops may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();
  Threads::gc_prologue();

  // Increment the invocation count for the permanent generation, since it is
  // implicitly collected whenever we do a full mark sweep collection.
  sh->perm_gen()->stat_record()->invocations++;

  bool marked_for_unloading = false;

  allocate_stacks();

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
  BiasedLocking::preserve_marks();

  mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);

  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  mark_sweep_phase3();

  mark_sweep_phase4();

  // Restore the mark words saved before marking (including the ones
  // preserved for biased locking) now that objects have stopped moving.
  GenMarkSweep::restore_marks();
  BiasedLocking::restore_marks();
  GenMarkSweep::deallocate_stacks();

  // We must invalidate the perm-gen rs, so that it gets rebuilt.
  GenRemSet* rs = sh->rem_set();
  rs->invalidate(sh->perm_gen()->used_region(), true /*whole_heap*/);

  // "free at last gc" is calculated from these.
  // CHF: cheating for now!!!
  //  Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
  //  Universe::set_heap_used_at_last_gc(Universe::heap()->used());

  Threads::gc_epilogue();
  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

  // refs processing: clean slate
  GenMarkSweep::_ref_processor = NULL;
}
   121 void G1MarkSweep::allocate_stacks() {
   122   GenMarkSweep::_preserved_count_max = 0;
   123   GenMarkSweep::_preserved_marks = NULL;
   124   GenMarkSweep::_preserved_count = 0;
   125 }
   127 void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
   128                                     bool clear_all_softrefs) {
   129   // Recursively traverse all live objects and mark them
   130   TraceTime tm("phase 1", G1Log::fine() && Verbose, true, gclog_or_tty);
   131   GenMarkSweep::trace(" 1");
   133   SharedHeap* sh = SharedHeap::heap();
   135   sh->process_strong_roots(true,  // activeate StrongRootsScope
   136                            true,  // Collecting permanent generation.
   137                            SharedHeap::SO_SystemClasses,
   138                            &GenMarkSweep::follow_root_closure,
   139                            &GenMarkSweep::follow_code_root_closure,
   140                            &GenMarkSweep::follow_root_closure);
   142   // Process reference objects found during marking
   143   ReferenceProcessor* rp = GenMarkSweep::ref_processor();
   144   assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Sanity");
   146   rp->setup_policy(clear_all_softrefs);
   147   rp->process_discovered_references(&GenMarkSweep::is_alive,
   148                                     &GenMarkSweep::keep_alive,
   149                                     &GenMarkSweep::follow_stack_closure,
   150                                     NULL);
   152   // Follow system dictionary roots and unload classes
   153   bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
   154   assert(GenMarkSweep::_marking_stack.is_empty(),
   155          "stack should be empty by now");
   157   // Follow code cache roots (has to be done after system dictionary,
   158   // assumes all live klasses are marked)
   159   CodeCache::do_unloading(&GenMarkSweep::is_alive,
   160                                    &GenMarkSweep::keep_alive,
   161                                    purged_class);
   162   GenMarkSweep::follow_stack();
   164   // Update subklass/sibling/implementor links of live klasses
   165   GenMarkSweep::follow_weak_klass_links();
   166   assert(GenMarkSweep::_marking_stack.is_empty(),
   167          "stack should be empty by now");
   169   // Visit memoized MDO's and clear any unmarked weak refs
   170   GenMarkSweep::follow_mdo_weak_refs();
   171   assert(GenMarkSweep::_marking_stack.is_empty(), "just drained");
   173   // Visit interned string tables and delete unmarked oops
   174   StringTable::unlink(&GenMarkSweep::is_alive);
   175   // Clean up unreferenced symbols in symbol table.
   176   SymbolTable::unlink();
   178   assert(GenMarkSweep::_marking_stack.is_empty(),
   179          "stack should be empty by now");
   181   if (VerifyDuringGC) {
   182     HandleMark hm;  // handle scope
   183     COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact);
   184     gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying ");
   185     Universe::heap()->prepare_for_verify();
   186     // Note: we can verify only the heap here. When an object is
   187     // marked, the previous value of the mark word (including
   188     // identity hash values, ages, etc) is preserved, and the mark
   189     // word is set to markOop::marked_value - effectively removing
   190     // any hash values from the mark word. These hash values are
   191     // used when verifying the dictionaries and so removing them
   192     // from the mark word can make verification of the dictionaries
   193     // fail. At the end of the GC, the orginal mark word values
   194     // (including hash values) are restored to the appropriate
   195     // objects.
   196     Universe::heap()->verify(/* silent      */ false,
   197                              /* option      */ VerifyOption_G1UseMarkWord);
   199     G1CollectedHeap* g1h = G1CollectedHeap::heap();
   200     gclog_or_tty->print_cr("]");
   201   }
   202 }
// Heap-region closure for phase 2: walks every region and computes the
// post-compaction address of each live object (via forwarding pointers),
// freeing dead humongous regions along the way. Freed regions are
// accumulated in a local humongous proxy set and folded back into the
// heap's region sets by update_sets() after the iteration.
class G1PrepareCompactClosure: public HeapRegionClosure {
  G1CollectedHeap* _g1h;
  ModRefBarrierSet* _mrbs;            // card table, cleared below compaction_top
  CompactPoint _cp;                   // running compaction cursor across regions
  HumongousRegionSet _humongous_proxy_set;

  // Free a dead humongous object's "starts humongous" region and prepare
  // it for reuse as compaction target space.
  void free_humongous_region(HeapRegion* hr) {
    HeapWord* end = hr->end();
    size_t dummy_pre_used;
    FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");

    assert(hr->startsHumongous(),
           "Only the start of a humongous region should be freed.");
    _g1h->free_humongous_region(hr, &dummy_pre_used, &dummy_free_list,
                                &_humongous_proxy_set, false /* par */);
    hr->prepare_for_compaction(&_cp);
    // Also clear the part of the card table that will be unused after
    // compaction.
    _mrbs->clear(MemRegion(hr->compaction_top(), end));
    // The free-list contents are discarded; see update_sets() for why the
    // aggregate counts are not updated here.
    dummy_free_list.remove_all();
  }

public:
  G1PrepareCompactClosure(CompactibleSpace* cs)
  : _g1h(G1CollectedHeap::heap()),
    _mrbs(G1CollectedHeap::heap()->mr_bs()),
    _cp(NULL, cs, cs->initialize_threshold()),
    _humongous_proxy_set("G1MarkSweep Humongous Proxy Set") { }

  // Must be called once after heap_region_iterate() to flush the proxy
  // set of freed humongous regions back into the heap's region sets.
  void update_sets() {
    // We'll recalculate total used bytes and recreate the free list
    // at the end of the GC, so no point in updating those values here.
    _g1h->update_sets_after_freeing_regions(0, /* pre_used */
                                            NULL, /* free_list */
                                            NULL, /* old_proxy_set */
                                            &_humongous_proxy_set,
                                            false /* par */);
  }

  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          // Live humongous objects are never moved: forward to self.
          obj->forward_to(obj);
        } else  {
          free_humongous_region(hr);
        }
      } else {
        // "Continues humongous" regions carry no separate objects.
        assert(hr->continuesHumongous(), "Invalid humongous.");
      }
    } else {
      hr->prepare_for_compaction(&_cp);
      // Also clear the part of the card table that will be unused after
      // compaction.
      _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
    }
    return false;  // never abort the iteration
  }
};
// Phase 2: with all live objects marked, compute each object's new
// address by installing forwarding pointers, region by region, and
// lastly in the permanent generation.
void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is imperative that we traverse perm_gen LAST. If dead space is
  // allowed a range of dead object may get overwritten by a dead int
  // array. If perm_gen is not traversed last a klassOop may get
  // overwritten. This is fine since it is dead, but if the class has dead
  // instances we have to skip them, and in order to find their size we
  // need the klassOop!
  //
  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  TraceTime tm("phase 2", G1Log::fine() && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("2");

  // find the first region
  HeapRegion* r = g1h->region_at(0);
  CompactibleSpace* sp = r;
  // A live humongous object at region 0 stays put, so compaction starts
  // in the next compaction space instead.
  if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
    sp = r->next_compaction_space();
  }

  G1PrepareCompactClosure blk(sp);
  g1h->heap_region_iterate(&blk);
  blk.update_sets();

  // Perm gen compacts within itself (no "next" compaction space).
  CompactPoint perm_cp(pg, NULL, NULL);
  pg->prepare_for_compaction(&perm_cp);
}
   300 class G1AdjustPointersClosure: public HeapRegionClosure {
   301  public:
   302   bool doHeapRegion(HeapRegion* r) {
   303     if (r->isHumongous()) {
   304       if (r->startsHumongous()) {
   305         // We must adjust the pointers on the single H object.
   306         oop obj = oop(r->bottom());
   307         debug_only(GenMarkSweep::track_interior_pointers(obj));
   308         // point all the oops to the new location
   309         obj->adjust_pointers();
   310         debug_only(GenMarkSweep::check_interior_pointers());
   311       }
   312     } else {
   313       // This really ought to be "as_CompactibleSpace"...
   314       r->adjust_pointers();
   315     }
   316     return false;
   317   }
   318 };
// Phase 3: adjust all pointers — in strong roots, weak roots, preserved
// marks, every heap region, and the permanent generation — to the new
// object locations computed in phase 2.
void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  // Adjust the pointers to reflect the new locations
  TraceTime tm("phase 3", G1Log::fine() && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("3");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // activate StrongRootsScope
                           true,  // Collecting permanent generation.
                           SharedHeap::SO_AllClasses,
                           &GenMarkSweep::adjust_root_pointer_closure,
                           NULL,  // do not touch code cache here
                           &GenMarkSweep::adjust_pointer_closure);

  // Adjust the discovered-reference lists held by the STW reference
  // processor.
  assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
  g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_root_pointer_closure);

  // Now adjust pointers in remaining weak roots.  (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  g1h->g1_process_weak_roots(&GenMarkSweep::adjust_root_pointer_closure,
                             &GenMarkSweep::adjust_pointer_closure);

  // Adjust the locations recorded for the preserved mark words.
  GenMarkSweep::adjust_marks();

  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
  pg->adjust_pointers();
}
   352 class G1SpaceCompactClosure: public HeapRegionClosure {
   353 public:
   354   G1SpaceCompactClosure() {}
   356   bool doHeapRegion(HeapRegion* hr) {
   357     if (hr->isHumongous()) {
   358       if (hr->startsHumongous()) {
   359         oop obj = oop(hr->bottom());
   360         if (obj->is_gc_marked()) {
   361           obj->init_mark();
   362         } else {
   363           assert(hr->is_empty(), "Should have been cleared in phase 2.");
   364         }
   365         hr->reset_during_compaction();
   366       }
   367     } else {
   368       hr->compact();
   369     }
   370     return false;
   371   }
   372 };
// Phase 4: all pointers have been adjusted, so physically move (compact)
// the objects — permanent generation first, then every heap region.
void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // It is imperative that we traverse perm_gen first in phase4. All
  // classes must be allocated earlier than their instances, and traversing
  // perm_gen first makes sure that all klassOops have moved to their new
  // location before any instance does a dispatch through it's klass!

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (perm_gen first rather than last), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  TraceTime tm("phase 4", G1Log::fine() && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("4");

  pg->compact();

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);
}

mercurial