/*
 * Copyright (c) 2001, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/symbolTable.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/codeCache.hpp"
#include "code/icBuffer.hpp"
#include "gc_interface/collectedHeap.inline.hpp"
#include "memory/genCollectedHeap.hpp"
#include "memory/genMarkSweep.hpp"
#include "memory/genOopClosures.inline.hpp"
#include "memory/generation.inline.hpp"
#include "memory/modRefBarrierSet.hpp"
#include "memory/referencePolicy.hpp"
#include "memory/space.hpp"
#include "oops/instanceRefKlass.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/fprofiler.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/synchronizer.hpp"
#include "runtime/thread.inline.hpp"
#include "runtime/vmThread.hpp"
#include "utilities/copy.hpp"
#include "utilities/events.hpp"

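// Entry point for a full, stop-the-world mark-compact collection of the
// generations up to and including 'level'. The collection proceeds in four
// phases: mark all live objects (phase 1), compute their new addresses
// (phase 2), adjust all pointers to refer to the new addresses (phase 3),
// and finally move the objects (phase 4).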
void GenMarkSweep::invoke_at_safepoint(int level, ReferenceProcessor* rp,
  bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  GenCollectedHeap* gch = GenCollectedHeap::heap();
#ifdef ASSERT
  if (gch->collector_policy()->should_clear_all_soft_refs()) {
    assert(clear_all_softrefs, "Policy should have been checked earlier");
  }
#endif

  // hook up weak ref data so it can be used during Mark-Sweep
  assert(ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  _ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  TraceTime t1(GCCauseString("Full GC", gch->gc_cause()), PrintGC && !PrintGCDetails, true, gclog_or_tty);

  // When collecting the permanent generation Method*s may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();
  Threads::gc_prologue();

  // Increment the invocation count
  _total_invocations++;

  // Capture heap size before collection for printing.
  size_t gch_prev_used = gch->used();

  // Some of the card table updates below assume that the perm gen is
  // also being collected.
  assert(level == gch->n_gens() - 1,
         "All generations are being collected, ergo perm gen too.");

  // Capture used regions for each generation that will be
  // subject to collection, so that card table adjustments can
  // be made intelligently (see clear / invalidate further below).
  gch->save_used_regions(level);

  allocate_stacks();

  mark_sweep_phase1(level, clear_all_softrefs);

  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(assert(DerivedPointerTable::is_active(), "Sanity"));
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  mark_sweep_phase3(level);

  mark_sweep_phase4();

  restore_marks();

  // Set saved marks for allocation profiler (and other things? -- dld)
  // (Should this be in general part?)
  gch->save_marks();

  deallocate_stacks();

  // If compaction completely evacuated all generations younger than this
  // one, then we can clear the card table.  Otherwise, we must invalidate
  // it (consider all cards dirty).  In the future, we might consider doing
  // compaction within generations only, and doing card-table sliding.
  bool all_empty = true;
  for (int i = 0; all_empty && i < level; i++) {
    Generation* g = gch->get_gen(i);
    all_empty = all_empty && gch->get_gen(i)->used() == 0;
  }
  GenRemSet* rs = gch->rem_set();
  // Clear/invalidate below make use of the "prev_used_regions" saved earlier.
  if (all_empty) {
    // We've evacuated all generations below us.
    Generation* g = gch->get_gen(level);
    rs->clear_into_younger(g);
  } else {
    // Invalidate the cards corresponding to the currently used
    // region and clear those corresponding to the evacuated region
    // of all generations just collected (i.e. level and younger).
    rs->invalidate_or_clear(gch->get_gen(level),
                            true /* younger */);
  }

  Threads::gc_epilogue();
  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

  if (PrintGC && !PrintGCDetails) {
    gch->print_heap_change(gch_prev_used);
  }

  // refs processing: clean slate
  _ref_processor = NULL;

  // Update heap occupancy information which is used as
  // input to soft ref clearing policy at the next gc.
  Universe::update_heap_info_at_gc();

  // Update time of last gc for all generations we collected
  // (which currently is all the generations in the heap).
  // We need to use a monotonically non-decreasing time in ms
  // because os::javaTimeMillis() does not guarantee monotonicity
  // and we would otherwise see time-warp warnings.
  jlong now = os::javaTimeNanos() / NANOSECS_PER_MILLISEC;
  gch->update_time_of_last_gc(now);
}

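// Note: the "stacks" allocated here back the preserved-mark machinery used
// by the restore_marks()/adjust_marks() calls above: mark words that cannot
// be reconstructed after being overwritten during the collection are saved
// away. The first scratch block handed back by the heap is reused as an
// array of PreservedMark entries; if no scratch is available, or it fills
// up, entries overflow into the growable _preserved_mark_stack and
// _preserved_oop_stack cleared in deallocate_stacks().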
void GenMarkSweep::allocate_stacks() {
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  // Scratch request on behalf of oldest generation; will do no
  // allocation.
  ScratchBlock* scratch = gch->gather_scratch(gch->_gens[gch->_n_gens-1], 0);

  // $$$ To cut a corner, we'll only use the first scratch block, and then
  // revert to malloc.
  if (scratch != NULL) {
    _preserved_count_max =
      scratch->num_words * HeapWordSize / sizeof(PreservedMark);
  } else {
    _preserved_count_max = 0;
  }

  _preserved_marks = (PreservedMark*)scratch;
  _preserved_count = 0;
}


void GenMarkSweep::deallocate_stacks() {
  if (!UseG1GC) {
    GenCollectedHeap* gch = GenCollectedHeap::heap();
    gch->release_scratch();
  }

  _preserved_mark_stack.clear(true);
  _preserved_oop_stack.clear(true);
  _marking_stack.clear();
  _objarray_stack.clear(true);
}

void GenMarkSweep::mark_sweep_phase1(int level,
                                     bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  TraceTime tm("phase 1", PrintGC && Verbose, true, gclog_or_tty);
  trace(" 1");

  GenCollectedHeap* gch = GenCollectedHeap::heap();

  // Because follow_root_closure is created statically, cannot
  // use OopsInGenClosure constructor which takes a generation,
  // as the Universe has not been created when the static constructors
  // are run.
  follow_root_closure.set_orig_generation(gch->get_gen(level));

  // Need new claim bits before marking starts.
  ClassLoaderDataGraph::clear_claimed_marks();

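  // Mark everything transitively reachable from the strong roots (thread
  // stacks, JNI handles, system classes and other VM-internal roots).
  // Reference objects discovered along the way are recorded by the
  // ReferenceProcessor and dealt with below.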
  gch->gen_process_strong_roots(level,
                                false, // Younger gens are not roots.
                                true,  // activate StrongRootsScope
                                false, // not scavenging
                                SharedHeap::SO_SystemClasses,
                                &follow_root_closure,
                                true,  // walk code active on stacks
                                &follow_root_closure,
                                &follow_klass_closure);

  // Process reference objects found during marking
  {
    ref_processor()->setup_policy(clear_all_softrefs);
    ref_processor()->process_discovered_references(
      &is_alive, &keep_alive, &follow_stack_closure, NULL);
  }

  // Follow system dictionary roots and unload classes
  bool purged_class = SystemDictionary::do_unloading(&is_alive);

  // Follow code cache roots
  CodeCache::do_unloading(&is_alive, purged_class);
  follow_stack(); // Flush marking stack

  // Update subklass/sibling/implementor links of live klasses
  Klass::clean_weak_klass_links(&is_alive);
  assert(_marking_stack.is_empty(), "just drained");

  // Visit interned string tables and delete unmarked oops
  StringTable::unlink(&is_alive);
  // Clean up unreferenced symbols in symbol table.
  SymbolTable::unlink();

  assert(_marking_stack.is_empty(), "stack should be empty by now");
}


void GenMarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is imperative that we traverse perm_gen LAST. If dead space is
  // allowed, a range of dead objects may get overwritten by a dead int
  // array. If perm_gen is not traversed last a Klass* may get
  // overwritten. This is fine since it is dead, but if the class has dead
  // instances we have to skip them, and in order to find their size we
  // need the Klass*!
  //
  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  GenCollectedHeap* gch = GenCollectedHeap::heap();

  TraceTime tm("phase 2", PrintGC && Verbose, true, gclog_or_tty);
  trace("2");

  gch->prepare_for_compaction();
}

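// Helper closure for phase 3: applies adjust_pointers() to each generation
// so that references inside the heap are updated to the forwarding
// addresses computed in phase 2.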
class GenAdjustPointersClosure: public GenCollectedHeap::GenClosure {
public:
  void do_generation(Generation* gen) {
    gen->adjust_pointers();
  }
};

void GenMarkSweep::mark_sweep_phase3(int level) {
  GenCollectedHeap* gch = GenCollectedHeap::heap();

  // Adjust the pointers to reflect the new locations
  TraceTime tm("phase 3", PrintGC && Verbose, true, gclog_or_tty);
  trace("3");

  // Need new claim bits for the pointer adjustment tracing.
  ClassLoaderDataGraph::clear_claimed_marks();

  // Because the two closures below are created statically, cannot
  // use OopsInGenClosure constructor which takes a generation,
  // as the Universe has not been created when the static constructors
  // are run.
  adjust_root_pointer_closure.set_orig_generation(gch->get_gen(level));
  adjust_pointer_closure.set_orig_generation(gch->get_gen(level));

  gch->gen_process_strong_roots(level,
                                false, // Younger gens are not roots.
                                true,  // activate StrongRootsScope
                                false, // not scavenging
                                SharedHeap::SO_AllClasses,
                                &adjust_root_pointer_closure,
                                false, // do not walk code
                                &adjust_root_pointer_closure,
                                &adjust_klass_closure);

  // Now adjust pointers in remaining weak roots.  (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  CodeBlobToOopClosure adjust_code_pointer_closure(&adjust_pointer_closure,
                                                   /*do_marking=*/ false);
  gch->gen_process_weak_roots(&adjust_root_pointer_closure,
                              &adjust_code_pointer_closure,
                              &adjust_pointer_closure);

  adjust_marks();
  GenAdjustPointersClosure blk;
  gch->generation_iterate(&blk, true);
}

class GenCompactClosure: public GenCollectedHeap::GenClosure {
public:
  void do_generation(Generation* gen) {
    gen->compact();
  }
};

void GenMarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // It is imperative that we traverse perm_gen first in phase4. All
  // classes must be allocated earlier than their instances, and traversing
  // perm_gen first makes sure that all Klass*s have moved to their new
  // location before any instance does a dispatch through its klass!

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (perm_gen first rather than last), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  GenCollectedHeap* gch = GenCollectedHeap::heap();

  TraceTime tm("phase 4", PrintGC && Verbose, true, gclog_or_tty);
  trace("4");

  GenCompactClosure blk;
  gch->generation_iterate(&blk, true);
}