Sat, 06 Oct 2012 01:17:44 -0700
7127708: G1: change task num types from int to uint in concurrent mark
Summary: Change the type of various task num fields, parameters etc to unsigned and rename them to be more consistent with the other collectors. Code changes were also reviewed by Vitaly Davidovich.
Reviewed-by: johnc
Contributed-by: Kaushik Srenevasan <kaushik@twitter.com>
1 /*
2 * Copyright (c) 2001, 2012, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/symbolTable.hpp"
28 #include "classfile/systemDictionary.hpp"
29 #include "classfile/vmSymbols.hpp"
30 #include "code/codeCache.hpp"
31 #include "code/icBuffer.hpp"
32 #include "gc_implementation/g1/g1Log.hpp"
33 #include "gc_implementation/g1/g1MarkSweep.hpp"
34 #include "memory/gcLocker.hpp"
35 #include "memory/genCollectedHeap.hpp"
36 #include "memory/modRefBarrierSet.hpp"
37 #include "memory/referencePolicy.hpp"
38 #include "memory/space.hpp"
39 #include "oops/instanceRefKlass.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "prims/jvmtiExport.hpp"
42 #include "runtime/aprofiler.hpp"
43 #include "runtime/biasedLocking.hpp"
44 #include "runtime/fprofiler.hpp"
45 #include "runtime/synchronizer.hpp"
46 #include "runtime/thread.hpp"
47 #include "runtime/vmThread.hpp"
48 #include "utilities/copy.hpp"
49 #include "utilities/events.hpp"
51 class HeapRegion;
53 void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
54 bool clear_all_softrefs) {
55 assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
57 SharedHeap* sh = SharedHeap::heap();
58 #ifdef ASSERT
59 if (sh->collector_policy()->should_clear_all_soft_refs()) {
60 assert(clear_all_softrefs, "Policy should have been checked earler");
61 }
62 #endif
63 // hook up weak ref data so it can be used during Mark-Sweep
64 assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
65 assert(rp != NULL, "should be non-NULL");
66 assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Precondition");
68 GenMarkSweep::_ref_processor = rp;
69 rp->setup_policy(clear_all_softrefs);
71 // When collecting the permanent generation Method*s may be moving,
72 // so we either have to flush all bcp data or convert it into bci.
73 CodeCache::gc_prologue();
74 Threads::gc_prologue();
76 bool marked_for_unloading = false;
78 allocate_stacks();
80 // We should save the marks of the currently locked biased monitors.
81 // The marking doesn't preserve the marks of biased objects.
82 BiasedLocking::preserve_marks();
84 mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);
86 mark_sweep_phase2();
88 // Don't add any more derived pointers during phase3
89 COMPILER2_PRESENT(DerivedPointerTable::set_active(false));
91 mark_sweep_phase3();
93 mark_sweep_phase4();
95 GenMarkSweep::restore_marks();
96 BiasedLocking::restore_marks();
97 GenMarkSweep::deallocate_stacks();
99 // "free at last gc" is calculated from these.
100 // CHF: cheating for now!!!
101 // Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
102 // Universe::set_heap_used_at_last_gc(Universe::heap()->used());
104 Threads::gc_epilogue();
105 CodeCache::gc_epilogue();
106 JvmtiExport::gc_epilogue();
108 // refs processing: clean slate
109 GenMarkSweep::_ref_processor = NULL;
110 }
113 void G1MarkSweep::allocate_stacks() {
114 GenMarkSweep::_preserved_count_max = 0;
115 GenMarkSweep::_preserved_marks = NULL;
116 GenMarkSweep::_preserved_count = 0;
117 }
// Phase 1 of full GC: recursively mark all live objects starting from the
// strong roots, process discovered references, unload dead classes, and
// prune the string/symbol tables. `marked_for_unloading` is an out-param
// (not written here); `clear_all_softrefs` controls soft-reference policy.
// NOTE: the ordering below is significant — reference processing must
// follow root marking, and class/code unloading must follow reference
// processing.
void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  TraceTime tm("phase 1", G1Log::fine() && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace(" 1");

  SharedHeap* sh = SharedHeap::heap();

  // Need cleared claim bits for the strong roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  sh->process_strong_roots(true,  // activate StrongRootsScope
                           false, // not scavenging.
                           SharedHeap::SO_SystemClasses,
                           &GenMarkSweep::follow_root_closure,
                           &GenMarkSweep::follow_code_root_closure,
                           &GenMarkSweep::follow_klass_closure);

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  assert(rp == G1CollectedHeap::heap()->ref_processor_stw(), "Sanity");

  rp->setup_policy(clear_all_softrefs);
  rp->process_discovered_references(&GenMarkSweep::is_alive,
                                    &GenMarkSweep::keep_alive,
                                    &GenMarkSweep::follow_stack_closure,
                                    NULL);

  // Follow system dictionary roots and unload classes
  bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");

  // Follow code cache roots (has to be done after system dictionary,
  // assumes all live klasses are marked)
  CodeCache::do_unloading(&GenMarkSweep::is_alive, purged_class);
  GenMarkSweep::follow_stack();

  // Update subklass/sibling/implementor links of live klasses
  Klass::clean_weak_klass_links(&GenMarkSweep::is_alive);
  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");

  // Visit interned string tables and delete unmarked oops
  StringTable::unlink(&GenMarkSweep::is_alive);
  // Clean up unreferenced symbols in symbol table.
  SymbolTable::unlink();

  assert(GenMarkSweep::_marking_stack.is_empty(),
         "stack should be empty by now");

  if (VerifyDuringGC) {
    HandleMark hm;  // handle scope
    COMPILER2_PRESENT(DerivedPointerTableDeactivate dpt_deact);
    gclog_or_tty->print(" VerifyDuringGC:(full)[Verifying ");
    Universe::heap()->prepare_for_verify();
    // Note: we can verify only the heap here. When an object is
    // marked, the previous value of the mark word (including
    // identity hash values, ages, etc) is preserved, and the mark
    // word is set to markOop::marked_value - effectively removing
    // any hash values from the mark word. These hash values are
    // used when verifying the dictionaries and so removing them
    // from the mark word can make verification of the dictionaries
    // fail. At the end of the GC, the original mark word values
    // (including hash values) are restored to the appropriate
    // objects.
    Universe::heap()->verify(/* silent */ false,
                             /* option */ VerifyOption_G1UseMarkWord);

    // NOTE(review): g1h appears unused in this scope — confirm and
    // consider removing.
    G1CollectedHeap* g1h = G1CollectedHeap::heap();
    gclog_or_tty->print_cr("]");
  }
}
// Heap-region closure for phase 2: computes forwarding addresses for each
// region's live objects by threading a CompactPoint through the regions in
// iteration order. Humongous regions holding a dead object are freed
// outright; live humongous objects are "forwarded to self" (they are never
// moved).
class G1PrepareCompactClosure: public HeapRegionClosure {
  G1CollectedHeap* _g1h;
  ModRefBarrierSet* _mrbs;       // card table barrier set, cleared for freed tails
  CompactPoint _cp;              // running destination cursor for compaction
  HumongousRegionSet _humongous_proxy_set;

  // Free a (starts-)humongous region whose object died: return it to a
  // dummy free list, let it participate in compaction as empty space, and
  // clear the now-unused tail of the card table.
  void free_humongous_region(HeapRegion* hr) {
    HeapWord* end = hr->end();
    size_t dummy_pre_used;
    FreeRegionList dummy_free_list("Dummy Free List for G1MarkSweep");

    assert(hr->startsHumongous(),
           "Only the start of a humongous region should be freed.");
    _g1h->free_humongous_region(hr, &dummy_pre_used, &dummy_free_list,
                                &_humongous_proxy_set, false /* par */);
    hr->prepare_for_compaction(&_cp);
    // Also clear the part of the card table that will be unused after
    // compaction.
    _mrbs->clear(MemRegion(hr->compaction_top(), end));
    dummy_free_list.remove_all();
  }

public:
  // `cs` is the first compactible space, i.e. where compaction starts.
  G1PrepareCompactClosure(CompactibleSpace* cs)
  : _g1h(G1CollectedHeap::heap()),
    _mrbs(G1CollectedHeap::heap()->mr_bs()),
    _cp(NULL, cs, cs->initialize_threshold()),
    _humongous_proxy_set("G1MarkSweep Humongous Proxy Set") { }

  // Flush the regions accumulated in the humongous proxy set back into the
  // heap's master region sets. Called once after iteration completes.
  void update_sets() {
    // We'll recalculate total used bytes and recreate the free list
    // at the end of the GC, so no point in updating those values here.
    _g1h->update_sets_after_freeing_regions(0, /* pre_used */
                                            NULL, /* free_list */
                                            NULL, /* old_proxy_set */
                                            &_humongous_proxy_set,
                                            false /* par */);
  }

  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          // Live humongous objects are never moved: forward to self.
          obj->forward_to(obj);
        } else {
          free_humongous_region(hr);
        }
      } else {
        assert(hr->continuesHumongous(), "Invalid humongous.");
      }
    } else {
      hr->prepare_for_compaction(&_cp);
      // Also clear the part of the card table that will be unused after
      // compaction.
      _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
    }
    return false; // never abort the iteration
  }
};
// Phase 2 of full GC: with all live objects marked, compute each live
// object's new (post-compaction) address by walking the regions with
// G1PrepareCompactClosure.
void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  TraceTime tm("phase 2", G1Log::fine() && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("2");

  // find the first region
  HeapRegion* r = g1h->region_at(0);
  CompactibleSpace* sp = r;
  // If region 0 holds a live humongous object it cannot be compacted
  // into, so start compaction at the next compaction space instead.
  if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
    sp = r->next_compaction_space();
  }

  G1PrepareCompactClosure blk(sp);
  g1h->heap_region_iterate(&blk);
  blk.update_sets();
}
278 class G1AdjustPointersClosure: public HeapRegionClosure {
279 public:
280 bool doHeapRegion(HeapRegion* r) {
281 if (r->isHumongous()) {
282 if (r->startsHumongous()) {
283 // We must adjust the pointers on the single H object.
284 oop obj = oop(r->bottom());
285 debug_only(GenMarkSweep::track_interior_pointers(obj));
286 // point all the oops to the new location
287 obj->adjust_pointers();
288 debug_only(GenMarkSweep::check_interior_pointers());
289 }
290 } else {
291 // This really ought to be "as_CompactibleSpace"...
292 r->adjust_pointers();
293 }
294 return false;
295 }
296 };
// Phase 3 of full GC: adjust all pointers — strong roots, STW discovered
// references, remaining weak roots, preserved marks, then every interior
// oop in the heap — to refer to the forwarding addresses computed in
// phase 2. The ordering (strong roots, then weak refs, then heap) matters.
void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  // Adjust the pointers to reflect the new locations
  TraceTime tm("phase 3", G1Log::fine() && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("3");

  SharedHeap* sh = SharedHeap::heap();

  // Need cleared claim bits for the strong roots processing
  ClassLoaderDataGraph::clear_claimed_marks();

  sh->process_strong_roots(true,  // activate StrongRootsScope
                           false, // not scavenging.
                           SharedHeap::SO_AllClasses,
                           &GenMarkSweep::adjust_root_pointer_closure,
                           NULL,  // do not touch code cache here
                           &GenMarkSweep::adjust_klass_closure);

  assert(GenMarkSweep::ref_processor() == g1h->ref_processor_stw(), "Sanity");
  g1h->ref_processor_stw()->weak_oops_do(&GenMarkSweep::adjust_root_pointer_closure);

  // Now adjust pointers in remaining weak roots.  (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  g1h->g1_process_weak_roots(&GenMarkSweep::adjust_root_pointer_closure,
                             &GenMarkSweep::adjust_pointer_closure);

  GenMarkSweep::adjust_marks();

  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
}
331 class G1SpaceCompactClosure: public HeapRegionClosure {
332 public:
333 G1SpaceCompactClosure() {}
335 bool doHeapRegion(HeapRegion* hr) {
336 if (hr->isHumongous()) {
337 if (hr->startsHumongous()) {
338 oop obj = oop(hr->bottom());
339 if (obj->is_gc_marked()) {
340 obj->init_mark();
341 } else {
342 assert(hr->is_empty(), "Should have been cleared in phase 2.");
343 }
344 hr->reset_during_compaction();
345 }
346 } else {
347 hr->compact();
348 }
349 return false;
350 }
351 };
// Phase 4 of full GC: all pointers have been adjusted, so physically move
// (compact) the live objects to their new locations.
void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (code and comment not fixed for perm removal), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  // NOTE(review): the perm-gen remark above predates permgen removal and
  // looks stale — confirm whether it still applies.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();

  TraceTime tm("phase 4", G1Log::fine() && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("4");

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);

}