src/share/vm/gc_implementation/g1/g1MarkSweep.cpp

Fri, 10 Oct 2014 15:51:58 +0200

author
tschatzl
date
Fri, 10 Oct 2014 15:51:58 +0200
changeset 7257
e7d0505c8a30
parent 7131
d35872270666
child 7535
7ae4e26cb1e0
child 7659
38d6febe66af
permissions
-rw-r--r--

8059758: Footprint regressions with JDK-8038423
Summary: Changes in JDK-8038423 always initialize (zero out) virtual memory used for auxiliary data structures. This causes a footprint regression for G1 in startup benchmarks. This is because they do not touch that memory at all, so the operating system does not actually commit these pages. The fix is to, if the initialization value of the data structures matches the default value of just committed memory (=0), do not do anything.
Reviewed-by: jwilhelm, brutisso

     1 /*
     2  * Copyright (c) 2001, 2014, Oracle and/or its affiliates. All rights reserved.
     3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4  *
     5  * This code is free software; you can redistribute it and/or modify it
     6  * under the terms of the GNU General Public License version 2 only, as
     7  * published by the Free Software Foundation.
     8  *
     9  * This code is distributed in the hope that it will be useful, but WITHOUT
    10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    12  * version 2 for more details (a copy is included in the LICENSE file that
    13  * accompanied this code).
    14  *
    15  * You should have received a copy of the GNU General Public License version
    16  * 2 along with this work; if not, write to the Free Software Foundation,
    17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    18  *
    19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
    20  * or visit www.oracle.com if you need additional information or have any
    21  * questions.
    22  *
    23  */
    25 #include "precompiled.hpp"
    26 #include "classfile/javaClasses.hpp"
    27 #include "classfile/symbolTable.hpp"
    28 #include "classfile/systemDictionary.hpp"
    29 #include "classfile/vmSymbols.hpp"
    30 #include "code/codeCache.hpp"
    31 #include "code/icBuffer.hpp"
    32 #include "gc_implementation/g1/g1Log.hpp"
    33 #include "gc_implementation/g1/g1MarkSweep.hpp"
    34 #include "gc_implementation/g1/g1StringDedup.hpp"
    35 #include "gc_implementation/shared/gcHeapSummary.hpp"
    36 #include "gc_implementation/shared/gcTimer.hpp"
    37 #include "gc_implementation/shared/gcTrace.hpp"
    38 #include "gc_implementation/shared/gcTraceTime.hpp"
    39 #include "memory/gcLocker.hpp"
    40 #include "memory/genCollectedHeap.hpp"
    41 #include "memory/modRefBarrierSet.hpp"
    42 #include "memory/referencePolicy.hpp"
    43 #include "memory/space.hpp"
    44 #include "oops/instanceRefKlass.hpp"
    45 #include "oops/oop.inline.hpp"
    46 #include "prims/jvmtiExport.hpp"
    47 #include "runtime/biasedLocking.hpp"
    48 #include "runtime/fprofiler.hpp"
    49 #include "runtime/synchronizer.hpp"
    50 #include "runtime/thread.hpp"
    51 #include "runtime/vmThread.hpp"
    52 #include "utilities/copy.hpp"
    53 #include "utilities/events.hpp"
    55 class HeapRegion;
    57 void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
    58                                       bool clear_all_softrefs) {
    59   assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
    61   SharedHeap* sh = SharedHeap::heap();
    62 #ifdef ASSERT
    63   if (sh->collector_policy()->should_clear_all_soft_refs()) {
    64     assert(clear_all_softrefs, "Policy should have been checked earler");
    65   }
    66 #endif
    67   // hook up weak ref data so it can be used during Mark-Sweep
    68   assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
    69   assert(rp != NULL, "should be non-NULL");
    70   assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Precondition");
    72   GenMarkSweep::_ref_processor = rp;
    73   rp->setup_policy(clear_all_softrefs);
    75   // When collecting the permanent generation Method*s may be moving,
    76   // so we either have to flush all bcp data or convert it into bci.
    77   CodeCache::gc_prologue();
    78   Threads::gc_prologue();
    80   bool marked_for_unloading = false;
    82   allocate_stacks();
    84   // We should save the marks of the currently locked biased monitors.
    85   // The marking doesn't preserve the marks of biased objects.
    86   BiasedLocking::preserve_marks();
    88   mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);
    90   mark_sweep_phase2();
    92   // Don't add any more derived pointers during phase3
    93   COMPILER2_PRESENT(DerivedPointerTable::set_active(false));
    95   mark_sweep_phase3();
    97   mark_sweep_phase4();
    99   GenMarkSweep::restore_marks();
   100   BiasedLocking::restore_marks();
   101   GenMarkSweep::deallocate_stacks();
   103   // "free at last gc" is calculated from these.
   104   // CHF: cheating for now!!!
   105   //  Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
   106   //  Universe::set_heap_used_at_last_gc(Universe::heap()->used());
   108   Threads::gc_epilogue();
   109   CodeCache::gc_epilogue();
   110   JvmtiExport::gc_epilogue();
   112   // refs processing: clean slate
   113   GenMarkSweep::_ref_processor = NULL;
   114 }
   117 void G1MarkSweep::allocate_stacks() {
   118   GenMarkSweep::_preserved_count_max = 0;
   119   GenMarkSweep::_preserved_marks = NULL;
   120   GenMarkSweep::_preserved_count = 0;
   121 }
// Phase 1: recursively traverse all live objects from the strong roots and
// mark them, process discovered references, then unload dead classes,
// nmethods, klass links and interned strings/symbols.
//
// marked_for_unloading - NOTE(review): never written in this body; the
//                        caller's flag is left untouched.
// clear_all_softrefs   - forwarded to the reference processor's policy.
void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  GCTraceTime tm("phase 1", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
  GenMarkSweep::trace(" 1");

  SharedHeap* sh = SharedHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  // Strong-roots-only scan: marking, so no code-blob relocation fixups.
  MarkingCodeBlobClosure follow_code_closure(&GenMarkSweep::follow_root_closure, !CodeBlobToOopClosure::FixRelocations);
  sh->process_strong_roots(true,   // activate StrongRootsScope
                           SharedHeap::SO_None,
                           &GenMarkSweep::follow_root_closure,
                           &GenMarkSweep::follow_cld_closure,
                           &follow_code_closure);

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Sanity");

  rp->setup_policy(clear_all_softrefs);
  const ReferenceProcessorStats& stats =
    rp->process_discovered_references(&GenMarkSweep::is_alive,
                                      &GenMarkSweep::keep_alive,
                                      &GenMarkSweep::follow_stack_closure,
                                      NULL,
                                      gc_timer(),
                                      gc_tracer()->gc_id());
  gc_tracer()->report_gc_reference_stats(stats);

  // This is the point where the entire marking should have completed.
  assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");

  // Unload classes and purge the SystemDictionary.
  bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);

  // Unload nmethods (made unloadable if a class was purged above).
  CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);

  // Prune dead klasses from subklass/sibling/implementor lists.
  Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);

  // Delete entries for dead interned string and clean up unreferenced symbols in symbol table.
  G1CollectedHeap::heap()->unlink_string_and_symbol_table(&GenMarkSweep::is_alive);

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
    COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact);
    Universe::heap()->prepare_for_verify();
    // Note: we can verify only the heap here. When an object is
    // marked, the previous value of the mark word (including
    // identity hash values, ages, etc) is preserved, and the mark
    // word is set to markOop::marked_value - effectively removing
    // any hash values from the mark word. These hash values are
    // used when verifying the dictionaries and so removing them
    // from the mark word can make verification of the dictionaries
    // fail. At the end of the GC, the original mark word values
    // (including hash values) are restored to the appropriate
    // objects.
    if (!VerifySilently) {
      gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying ");
    }
    Universe::heap()->verify(VerifySilently, VerifyOption_G1UseMarkWord);
    if (!VerifySilently) {
      gclog_or_tty->print_cr("]");
    }
  }

  gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
}
   198 void G1MarkSweep::mark_sweep_phase2() {
   199   // Now all live objects are marked, compute the new object addresses.
   201   // It is not required that we traverse spaces in the same order in
   202   // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
   203   // tracking expects us to do so. See comment under phase4.
   205   GCTraceTime tm("phase 2", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
   206   GenMarkSweep::trace("2");
   208   prepare_compaction();
   209 }
   211 class G1AdjustPointersClosure: public HeapRegionClosure {
   212  public:
   213   bool doHeapRegion(HeapRegion* r) {
   214     if (r->isHumongous()) {
   215       if (r->startsHumongous()) {
   216         // We must adjust the pointers on the single H object.
   217         oop obj = oop(r->bottom());
   218         // point all the oops to the new location
   219         obj->adjust_pointers();
   220       }
   221     } else {
   222       // This really ought to be "as_CompactibleSpace"...
   223       r->adjust_pointers();
   224     }
   225     return false;
   226   }
   227 };
// Phase 3: adjust all pointers (roots, weak roots, and in-heap oops) to
// refer to the new object locations computed in phase 2.
void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Adjust the pointers to reflect the new locations
  GCTraceTime tm("phase 3", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
  GenMarkSweep::trace("3");

  SharedHeap* sh = SharedHeap::heap();

  // Need cleared claim bits for the roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  // Adjusting pass visits all roots (including the whole code cache) and
  // fixes code-blob relocations, unlike the marking pass in phase 1.
  CodeBlobToOopClosure adjust_code_closure(&GenMarkSweep::adjust_pointer_closure, CodeBlobToOopClosure::FixRelocations);
  sh->process_all_roots(true,  // activate StrongRootsScope
                        SharedHeap::SO_AllCodeCache,
                        &GenMarkSweep::adjust_pointer_closure,
                        &GenMarkSweep::adjust_cld_closure,
                        &adjust_code_closure);

  assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
  // Adjust the discovered-reference lists held by the STW reference processor.
  g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_pointer_closure);

  // Now adjust pointers in remaining weak roots.  (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  sh->process_weak_roots(&GenMarkSweep::adjust_pointer_closure);

  if (G1StringDedup::is_enabled()) {
    // The string dedup tables hold oops too; adjust them as well.
    G1StringDedup::oops_do(&GenMarkSweep::adjust_pointer_closure);
  }

  // Re-target the saved preserved-mark locations to the forwarded objects.
  GenMarkSweep::adjust_marks();

  // Finally adjust every oop stored inside heap objects, region by region.
  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
}
   265 class G1SpaceCompactClosure: public HeapRegionClosure {
   266 public:
   267   G1SpaceCompactClosure() {}
   269   bool doHeapRegion(HeapRegion* hr) {
   270     if (hr->isHumongous()) {
   271       if (hr->startsHumongous()) {
   272         oop obj = oop(hr->bottom());
   273         if (obj->is_gc_marked()) {
   274           obj->init_mark();
   275         } else {
   276           assert(hr->is_empty(), "Should have been cleared in phase 2.");
   277         }
   278         hr->reset_during_compaction();
   279       }
   280     } else {
   281       hr->compact();
   282     }
   283     return false;
   284   }
   285 };
   287 void G1MarkSweep::mark_sweep_phase4() {
   288   // All pointers are now adjusted, move objects accordingly
   290   // The ValidateMarkSweep live oops tracking expects us to traverse spaces
   291   // in the same order in phase2, phase3 and phase4. We don't quite do that
   292   // here (code and comment not fixed for perm removal), so we tell the validate code
   293   // to use a higher index (saved from phase2) when verifying perm_gen.
   294   G1CollectedHeap* g1h = G1CollectedHeap::heap();
   296   GCTraceTime tm("phase 4", G1Log::fine() && Verbose, true, gc_timer(), gc_tracer()->gc_id());
   297   GenMarkSweep::trace("4");
   299   G1SpaceCompactClosure blk;
   300   g1h->heap_region_iterate(&blk);
   302 }
   304 void G1MarkSweep::prepare_compaction_work(G1PrepareCompactClosure* blk) {
   305   G1CollectedHeap* g1h = G1CollectedHeap::heap();
   306   g1h->heap_region_iterate(blk);
   307   blk->update_sets();
   308 }
// Frees a dead humongous object's starts-region (and, via the heap call,
// its continuation regions), then makes the reclaimed range available to
// the compaction point.
//
// hr - must be the "starts humongous" region of the dead object.
void G1PrepareCompactClosure::free_humongous_region(HeapRegion* hr) {
  // Capture the end before freeing: the region's metadata is reset below.
  HeapWord* end = hr->end();
  // Throwaway list: freed regions are collected here only so they can be
  // detached again right away (the real free list is rebuilt after GC).
  FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");

  assert(hr->startsHumongous(),
         "Only the start of a humongous region should be freed.");

  hr->set_containing_set(NULL);
  // Account the removal; update_sets() reports this total to the heap.
  _humongous_regions_removed.increment(1u, hr->capacity());

  _g1h->free_humongous_region(hr, &dummy_free_list, false /* par */);
  // Let compaction reuse the space the humongous object occupied.
  prepare_for_compaction(hr, end);
  dummy_free_list.remove_all();
}
// Computes forwarding addresses for the live objects in [hr->bottom(), end),
// lazily anchoring the compaction point at the first compactible region seen.
void G1PrepareCompactClosure::prepare_for_compaction(HeapRegion* hr, HeapWord* end) {
  // If this is the first live region that we came across which we can compact,
  // initialize the CompactPoint.
  if (!is_cp_initialized()) {
    _cp.space = hr;
    _cp.threshold = hr->initialize_threshold();
  }
  prepare_for_compaction_work(&_cp, hr, end);
}
// Forwards the region's live objects at the current compact point and
// clears the card-table range that becomes unused after compaction.
//
// cp  - compaction point tracking where the next object will slide to.
// hr  - region being prepared.
// end - upper bound of the range whose cards are cleared (usually hr->end()).
void G1PrepareCompactClosure::prepare_for_compaction_work(CompactPoint* cp,
                                                          HeapRegion* hr,
                                                          HeapWord* end) {
  hr->prepare_for_compaction(cp);
  // Also clear the part of the card table that will be unused after
  // compaction.
  _mrbs->clear(MemRegion(hr->compaction_top(), end));
}
   344 void G1PrepareCompactClosure::update_sets() {
   345   // We'll recalculate total used bytes and recreate the free list
   346   // at the end of the GC, so no point in updating those values here.
   347   HeapRegionSetCount empty_set;
   348   _g1h->remove_from_old_sets(empty_set, _humongous_regions_removed);
   349 }
   351 bool G1PrepareCompactClosure::doHeapRegion(HeapRegion* hr) {
   352   if (hr->isHumongous()) {
   353     if (hr->startsHumongous()) {
   354       oop obj = oop(hr->bottom());
   355       if (obj->is_gc_marked()) {
   356         obj->forward_to(obj);
   357       } else  {
   358         free_humongous_region(hr);
   359       }
   360     } else {
   361       assert(hr->continuesHumongous(), "Invalid humongous.");
   362     }
   363   } else {
   364     prepare_for_compaction(hr, hr->end());
   365   }
   366   return false;
   367 }

mercurial