/*
 * Copyright (c) 2001, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
#include "code/icBuffer.hpp"
#include "gc_implementation/g1/g1Log.hpp"
#include "gc_implementation/g1/g1MarkSweep.hpp"
#include "gc_implementation/g1/g1StringDedup.hpp"
#include "gc_implementation/shared/gcHeapSummary.hpp"
#include "gc_implementation/shared/gcTimer.hpp"
#include "gc_implementation/shared/gcTrace.hpp"
#include "gc_implementation/shared/gcTraceTime.hpp"
#include "memory/gcLocker.hpp"
#include "memory/genCollectedHeap.hpp"
#include "memory/modRefBarrierSet.hpp"
#include "memory/referencePolicy.hpp"
#include "memory/space.hpp"
#include "oops/instanceRefKlass.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/fprofiler.hpp"
#include "runtime/synchronizer.hpp"
#include "runtime/thread.hpp"
#include "runtime/vmThread.hpp"
#include "utilities/copy.hpp"
#include "utilities/events.hpp"

class HeapRegion;

void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                      bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  SharedHeap* sh = SharedHeap::heap();
#ifdef ASSERT
  if (sh->collector_policy()->should_clear_all_soft_refs()) {
    assert(clear_all_softrefs, "Policy should have been checked earlier");
  }
#endif
  // hook up weak ref data so it can be used during Mark-Sweep
  assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Precondition");

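  // The STW reference processor installed here is picked up again by phase 1
  // below and cleared back to NULL at the end of this method.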
  GenMarkSweep::_ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  // When collecting the permanent generation Method*s may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();
  Threads::gc_prologue();

  bool marked_for_unloading = false;

  allocate_stacks();

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
  BiasedLocking::preserve_marks();

  mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);

  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  mark_sweep_phase3();

  mark_sweep_phase4();

  GenMarkSweep::restore_marks();
  BiasedLocking::restore_marks();
  GenMarkSweep::deallocate_stacks();

  // "free at last gc" is calculated from these.
  // CHF: cheating for now!!!
  //     Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
  //     Universe::set_heap_used_at_last_gc(Universe::heap()->used());

  Threads::gc_epilogue();
  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

  // refs processing: clean slate
  GenMarkSweep::_ref_processor = NULL;
}


void G1MarkSweep::allocate_stacks() {
  GenMarkSweep::_preserved_count_max = 0;
  GenMarkSweep::_preserved_marks = NULL;
  GenMarkSweep::_preserved_count = 0;
}

void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  GCTraceTime tm("phase 1", G1Log::fine() && Verbose, true, gc_timer());
  GenMarkSweep::trace(" 1");

  SharedHeap* sh = SharedHeap::heap();

  // Need cleared claim bits for the strong roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  sh->process_strong_roots(true,  // activate StrongRootsScope
                           false, // not scavenging.
                           SharedHeap::SO_SystemClasses,
                           &GenMarkSweep::follow_root_closure,
                           &GenMarkSweep::follow_code_root_closure,
                           &GenMarkSweep::follow_klass_closure);

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Sanity");

  rp->setup_policy(clear_all_softrefs);
  const ReferenceProcessorStats& stats =
    rp->process_discovered_references(&GenMarkSweep::is_alive,
                                      &GenMarkSweep::keep_alive,
                                      &GenMarkSweep::follow_stack_closure,
                                      NULL,
                                      gc_timer());
  gc_tracer()->report_gc_reference_stats(stats);

  // This is the point where the entire marking should have completed.
  assert(GenMarkSweep::_marking_stack.is_empty(), "Marking should have completed");

  // Unload classes and purge the SystemDictionary.
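  // The return value records whether any classes were actually unloaded;
  // it is passed on to the nmethod unloading below.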
  bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);

  // Unload nmethods.
  CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);

  // Prune dead klasses from subklass/sibling/implementor lists.
  Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);

  // Delete entries for dead interned strings and clean up unreferenced symbols in the symbol table.
  G1CollectedHeap::heap()->unlink_string_and_symbol_table(&GenMarkSweep::is_alive);

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
    COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact);
    Universe::heap()->prepare_for_verify();
    // Note: we can verify only the heap here. When an object is
    // marked, the previous value of the mark word (including
    // identity hash values, ages, etc) is preserved, and the mark
    // word is set to markOop::marked_value - effectively removing
    // any hash values from the mark word. These hash values are
    // used when verifying the dictionaries and so removing them
    // from the mark word can make verification of the dictionaries
    // fail. At the end of the GC, the original mark word values
    // (including hash values) are restored to the appropriate
    // objects.
    if (!VerifySilently) {
      gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying ");
    }
    Universe::heap()->verify(VerifySilently, VerifyOption_G1UseMarkWord);
    if (!VerifySilently) {
      gclog_or_tty->print_cr("]");
    }
  }

  gc_tracer()->report_object_count_after_gc(&GenMarkSweep::is_alive);
}


class G1PrepareCompactClosure: public HeapRegionClosure {
  G1CollectedHeap* _g1h;
  ModRefBarrierSet* _mrbs;
  CompactPoint _cp;
  HeapRegionSetCount _humongous_regions_removed;

  void free_humongous_region(HeapRegion* hr) {
    HeapWord* end = hr->end();
    FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");

    assert(hr->startsHumongous(),
           "Only the start of a humongous region should be freed.");

    hr->set_containing_set(NULL);
    _humongous_regions_removed.increment(1u, hr->capacity());

    _g1h->free_humongous_region(hr, &dummy_free_list, false /* par */);
    hr->prepare_for_compaction(&_cp);
    // Also clear the part of the card table that will be unused after
    // compaction.
    _mrbs->clear(MemRegion(hr->compaction_top(), end));
    dummy_free_list.remove_all();
  }

public:
  G1PrepareCompactClosure(CompactibleSpace* cs)
  : _g1h(G1CollectedHeap::heap()),
    _mrbs(_g1h->g1_barrier_set()),
    _cp(NULL, cs, cs->initialize_threshold()),
    _humongous_regions_removed() { }

  void update_sets() {
    // We'll recalculate total used bytes and recreate the free list
    // at the end of the GC, so no point in updating those values here.
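    // Only the humongous regions freed by this closure need to be accounted
    // for; the old-region count passed in is intentionally empty.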
    HeapRegionSetCount empty_set;
    _g1h->remove_from_old_sets(empty_set, _humongous_regions_removed);
  }

  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          obj->forward_to(obj);
        } else {
          free_humongous_region(hr);
        }
      } else {
        assert(hr->continuesHumongous(), "Invalid humongous.");
      }
    } else {
      hr->prepare_for_compaction(&_cp);
      // Also clear the part of the card table that will be unused after
      // compaction.
      _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  GCTraceTime tm("phase 2", G1Log::fine() && Verbose, true, gc_timer());
  GenMarkSweep::trace("2");

  // find the first region
  HeapRegion* r = g1h->region_at(0);
  CompactibleSpace* sp = r;
  if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
    sp = r->next_compaction_space();
  }

  G1PrepareCompactClosure blk(sp);
  g1h->heap_region_iterate(&blk);
  blk.update_sets();
}

class G1AdjustPointersClosure: public HeapRegionClosure {
public:
  bool doHeapRegion(HeapRegion* r) {
    if (r->isHumongous()) {
      if (r->startsHumongous()) {
        // We must adjust the pointers on the single H object.
        oop obj = oop(r->bottom());
        // point all the oops to the new location
        obj->adjust_pointers();
      }
    } else {
      // This really ought to be "as_CompactibleSpace"...
      r->adjust_pointers();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Adjust the pointers to reflect the new locations
  GCTraceTime tm("phase 3", G1Log::fine() && Verbose, true, gc_timer());
  GenMarkSweep::trace("3");

  SharedHeap* sh = SharedHeap::heap();

  // Need cleared claim bits for the strong roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  sh->process_strong_roots(true,  // activate StrongRootsScope
                           false, // not scavenging.
                           SharedHeap::SO_AllClasses,
                           &GenMarkSweep::adjust_pointer_closure,
                           NULL,  // do not touch code cache here
                           &GenMarkSweep::adjust_klass_closure);

  assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
  g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_pointer_closure);

  // Now adjust pointers in remaining weak roots.  (All of which should
  // have been cleared if they pointed to non-surviving objects.)
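  // When string deduplication is enabled, its table entries are adjusted in
  // the same way just below.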
  g1h->g1_process_weak_roots(&GenMarkSweep::adjust_pointer_closure);

  if (G1StringDedup::is_enabled()) {
    G1StringDedup::oops_do(&GenMarkSweep::adjust_pointer_closure);
  }

  GenMarkSweep::adjust_marks();

  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
}

class G1SpaceCompactClosure: public HeapRegionClosure {
public:
  G1SpaceCompactClosure() {}

  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          obj->init_mark();
        } else {
          assert(hr->is_empty(), "Should have been cleared in phase 2.");
        }
        hr->reset_during_compaction();
      }
    } else {
      hr->compact();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (code and comment not fixed for perm removal), so we tell the
  // validate code to use a higher index (saved from phase2) when verifying
  // perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  GCTraceTime tm("phase 4", G1Log::fine() && Verbose, true, gc_timer());
  GenMarkSweep::trace("4");

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);

}