src/share/vm/gc_implementation/g1/g1MarkSweep.cpp

author       johnc
date         Mon, 02 Aug 2010 12:51:43 -0700
changeset    2060:2d160770d2e5
parent       1907:c18cbe5936b8
child        2191:894b1d7c7e01
permissions  -rw-r--r--

6814437: G1: remove the _new_refs array
Summary: The per-worker _new_refs array is used to hold references that point into the collection set. It is populated during RSet updating and subsequently processed. In the event of an evacuation failure it is processed again to recreate the RSets of regions in the collection set. Remove the per-worker _new_refs array by processing the references directly. Use a DirtyCardQueue to hold the cards containing the references so that the RSets of regions in the collection set can be recreated when handling an evacuation failure.
Reviewed-by: iveresov, jmasa, tonyp
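
For readers of the summary above, the scheme can be sketched in isolation: during RSet updating each reference is processed immediately, and only the card it was found on is logged when it points into the collection set; on an evacuation failure those cards are replayed to rebuild the remembered sets of the retained regions. The sketch below is a standalone illustration using plain C++ containers in place of HotSpot's DirtyCardQueue and remembered-set types; every name in it (IntoCSetCardLog, handle_reference, recreate_rsets) is hypothetical and it is not the code of this changeset.

#include <cstdint>
#include <functional>
#include <unordered_set>
#include <vector>

// Hypothetical stand-ins for HotSpot types; an illustration of the approach
// described in the summary, not the actual G1 code.
typedef std::uintptr_t CardIndex;

struct Region {
  std::unordered_set<CardIndex> rset;   // cards with references into this region
  bool in_collection_set = false;
};

// Per-worker log of cards containing references into the collection set;
// it plays the role the DirtyCardQueue takes over from the _new_refs array.
struct IntoCSetCardLog {
  std::vector<CardIndex> cards;
  void enqueue(CardIndex c) { cards.push_back(c); }
};

// During RSet updating a reference is handled right away; only the card it
// lives on is remembered when the target is in the collection set.
void handle_reference(CardIndex card, const Region& target, IntoCSetCardLog& log) {
  if (target.in_collection_set) {
    log.enqueue(card);                  // no per-worker array of references is kept
  }
}

// On evacuation failure, replay the logged cards: rescan each one and add it
// back to the RSet of every collection-set region it still points into.
// 'rescan' abstracts the card walk that finds the regions referenced from a card.
void recreate_rsets(const IntoCSetCardLog& log,
                    const std::function<std::vector<Region*>(CardIndex)>& rescan) {
  for (CardIndex card : log.cards) {
    for (Region* target : rescan(card)) {
      if (target->in_collection_set) {
        target->rset.insert(card);
      }
    }
  }
}

Logging cards rather than the references themselves means the only duplicated work on the failure path is one rescan per recorded card, and no per-worker reference array has to be grown and retained between phases.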

ysr@777 1 /*
trims@1907 2 * Copyright (c) 2001, 2010, Oracle and/or its affiliates. All rights reserved.
ysr@777 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
ysr@777 4 *
ysr@777 5 * This code is free software; you can redistribute it and/or modify it
ysr@777 6 * under the terms of the GNU General Public License version 2 only, as
ysr@777 7 * published by the Free Software Foundation.
ysr@777 8 *
ysr@777 9 * This code is distributed in the hope that it will be useful, but WITHOUT
ysr@777 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
ysr@777 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
ysr@777 12 * version 2 for more details (a copy is included in the LICENSE file that
ysr@777 13 * accompanied this code).
ysr@777 14 *
ysr@777 15 * You should have received a copy of the GNU General Public License version
ysr@777 16 * 2 along with this work; if not, write to the Free Software Foundation,
ysr@777 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
ysr@777 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
ysr@777 22 *
ysr@777 23 */
ysr@777 24
ysr@777 25 #include "incls/_precompiled.incl"
ysr@777 26 #include "incls/_g1MarkSweep.cpp.incl"
ysr@777 27
ysr@777 28 class HeapRegion;
ysr@777 29
ysr@777 30 void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
ysr@777 31 bool clear_all_softrefs) {
ysr@777 32 assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
ysr@777 33
jmasa@1822 34 SharedHeap* sh = SharedHeap::heap();
jmasa@1822 35 #ifdef ASSERT
jmasa@1822 36 if (sh->collector_policy()->should_clear_all_soft_refs()) {
jmasa@1822 37 assert(clear_all_softrefs, "Policy should have been checked earlier");
jmasa@1822 38 }
jmasa@1822 39 #endif
ysr@777 40 // hook up weak ref data so it can be used during Mark-Sweep
ysr@777 41 assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
ysr@888 42 assert(rp != NULL, "should be non-NULL");
ysr@777 43 GenMarkSweep::_ref_processor = rp;
ysr@892 44 rp->setup_policy(clear_all_softrefs);
ysr@777 45
ysr@777 46 // When collecting the permanent generation methodOops may be moving,
ysr@777 47 // so we either have to flush all bcp data or convert it into bci.
ysr@777 48 CodeCache::gc_prologue();
ysr@777 49 Threads::gc_prologue();
ysr@777 50
ysr@777 51 // Increment the invocation count for the permanent generation, since it is
ysr@777 52 // implicitly collected whenever we do a full mark sweep collection.
ysr@777 53 sh->perm_gen()->stat_record()->invocations++;
ysr@777 54
ysr@777 55 bool marked_for_unloading = false;
ysr@777 56
ysr@777 57 allocate_stacks();
ysr@777 58
iveresov@793 59 // We should save the marks of the currently locked biased monitors.
iveresov@793 60 // The marking doesn't preserve the marks of biased objects.
iveresov@793 61 BiasedLocking::preserve_marks();
iveresov@793 62
ysr@777 63 mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);
ysr@777 64
johnc@1186 65 if (VerifyDuringGC) {
ysr@777 66 G1CollectedHeap* g1h = G1CollectedHeap::heap();
ysr@777 67 g1h->checkConcurrentMark();
ysr@777 68 }
ysr@777 69
ysr@777 70 mark_sweep_phase2();
ysr@777 71
ysr@777 72 // Don't add any more derived pointers during phase3
ysr@777 73 COMPILER2_PRESENT(DerivedPointerTable::set_active(false));
ysr@777 74
ysr@777 75 mark_sweep_phase3();
ysr@777 76
ysr@777 77 mark_sweep_phase4();
ysr@777 78
ysr@777 79 GenMarkSweep::restore_marks();
iveresov@793 80 BiasedLocking::restore_marks();
ysr@777 81 GenMarkSweep::deallocate_stacks();
ysr@777 82
ysr@777 83 // We must invalidate the perm-gen rs, so that it gets rebuilt.
ysr@777 84 GenRemSet* rs = sh->rem_set();
ysr@777 85 rs->invalidate(sh->perm_gen()->used_region(), true /*whole_heap*/);
ysr@777 86
ysr@777 87 // "free at last gc" is calculated from these.
ysr@777 88 // CHF: cheating for now!!!
ysr@777 89 // Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
ysr@777 90 // Universe::set_heap_used_at_last_gc(Universe::heap()->used());
ysr@777 91
ysr@777 92 Threads::gc_epilogue();
ysr@777 93 CodeCache::gc_epilogue();
ysr@777 94
ysr@777 95 // refs processing: clean slate
ysr@777 96 GenMarkSweep::_ref_processor = NULL;
ysr@777 97 }
ysr@777 98
ysr@777 99
ysr@777 100 void G1MarkSweep::allocate_stacks() {
ysr@777 101 GenMarkSweep::_preserved_count_max = 0;
ysr@777 102 GenMarkSweep::_preserved_marks = NULL;
ysr@777 103 GenMarkSweep::_preserved_count = 0;
ysr@777 104 GenMarkSweep::_preserved_mark_stack = NULL;
ysr@777 105 GenMarkSweep::_preserved_oop_stack = NULL;
ysr@777 106
ysr@777 107 GenMarkSweep::_marking_stack =
ysr@777 108 new (ResourceObj::C_HEAP) GrowableArray<oop>(4000, true);
jcoomes@1746 109 GenMarkSweep::_objarray_stack =
jcoomes@1746 110 new (ResourceObj::C_HEAP) GrowableArray<ObjArrayTask>(50, true);
ysr@777 111
ysr@1376 112 int size = SystemDictionary::number_of_classes() * 2;
ysr@777 113 GenMarkSweep::_revisit_klass_stack =
ysr@1376 114 new (ResourceObj::C_HEAP) GrowableArray<Klass*>(size, true);
ysr@1376 115 // (#klass/k)^2 for k ~ 10 appears a better fit, but this will have to do
ysr@1376 116 // for now until we have a chance to work out a more optimal setting.
ysr@1376 117 GenMarkSweep::_revisit_mdo_stack =
ysr@1376 118 new (ResourceObj::C_HEAP) GrowableArray<DataLayout*>(size*2, true);
ysr@1376 119
ysr@777 120 }
ysr@777 121
ysr@777 122 void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
ysr@777 123 bool clear_all_softrefs) {
ysr@777 124 // Recursively traverse all live objects and mark them
ysr@777 125 EventMark m("1 mark object");
ysr@777 126 TraceTime tm("phase 1", PrintGC && Verbose, true, gclog_or_tty);
ysr@777 127 GenMarkSweep::trace(" 1");
ysr@777 128
ysr@777 129 SharedHeap* sh = SharedHeap::heap();
ysr@777 130
jrose@1424 131 sh->process_strong_roots(true, // activate StrongRootsScope
jrose@1424 132 true, // Collecting permanent generation.
ysr@777 133 SharedHeap::SO_SystemClasses,
ysr@777 134 &GenMarkSweep::follow_root_closure,
jrose@1424 135 &GenMarkSweep::follow_code_root_closure,
ysr@777 136 &GenMarkSweep::follow_root_closure);
ysr@777 137
ysr@777 138 // Process reference objects found during marking
ysr@888 139 ReferenceProcessor* rp = GenMarkSweep::ref_processor();
ysr@892 140 rp->setup_policy(clear_all_softrefs);
ysr@888 141 rp->process_discovered_references(&GenMarkSweep::is_alive,
ysr@888 142 &GenMarkSweep::keep_alive,
ysr@888 143 &GenMarkSweep::follow_stack_closure,
ysr@888 144 NULL);
ysr@777 145
ysr@777 146 // Follow system dictionary roots and unload classes
ysr@777 147 bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
ysr@777 148 assert(GenMarkSweep::_marking_stack->is_empty(),
ysr@777 149 "stack should be empty by now");
ysr@777 150
ysr@777 151 // Follow code cache roots (has to be done after system dictionary,
ysr@777 152 // assumes all live klasses are marked)
ysr@777 153 CodeCache::do_unloading(&GenMarkSweep::is_alive,
ysr@777 154 &GenMarkSweep::keep_alive,
ysr@777 155 purged_class);
ysr@1376 156 GenMarkSweep::follow_stack();
ysr@777 157
ysr@777 158 // Update subklass/sibling/implementor links of live klasses
ysr@777 159 GenMarkSweep::follow_weak_klass_links();
ysr@777 160 assert(GenMarkSweep::_marking_stack->is_empty(),
ysr@777 161 "stack should be empty by now");
ysr@777 162
ysr@1376 163 // Visit memoized MDO's and clear any unmarked weak refs
ysr@1376 164 GenMarkSweep::follow_mdo_weak_refs();
ysr@1376 165 assert(GenMarkSweep::_marking_stack->is_empty(), "just drained");
ysr@1376 166
ysr@1376 167
ysr@777 168 // Visit symbol and interned string tables and delete unmarked oops
ysr@777 169 SymbolTable::unlink(&GenMarkSweep::is_alive);
ysr@777 170 StringTable::unlink(&GenMarkSweep::is_alive);
ysr@777 171
ysr@777 172 assert(GenMarkSweep::_marking_stack->is_empty(),
ysr@777 173 "stack should be empty by now");
ysr@777 174 }
ysr@777 175
ysr@777 176 class G1PrepareCompactClosure: public HeapRegionClosure {
ysr@777 177 ModRefBarrierSet* _mrbs;
ysr@777 178 CompactPoint _cp;
ysr@777 179
ysr@777 180 void free_humongous_region(HeapRegion* hr) {
ysr@777 181 HeapWord* bot = hr->bottom();
ysr@777 182 HeapWord* end = hr->end();
ysr@777 183 assert(hr->startsHumongous(),
ysr@777 184 "Only the start of a humongous region should be freed.");
ysr@777 185 G1CollectedHeap::heap()->free_region(hr);
ysr@777 186 hr->prepare_for_compaction(&_cp);
ysr@777 187 // Also clear the part of the card table that will be unused after
ysr@777 188 // compaction.
ysr@777 189 _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
ysr@777 190 }
ysr@777 191
ysr@777 192 public:
apetrusenko@1112 193 G1PrepareCompactClosure(CompactibleSpace* cs) :
ysr@777 194 _cp(NULL, cs, cs->initialize_threshold()),
apetrusenko@1112 195 _mrbs(G1CollectedHeap::heap()->mr_bs())
ysr@777 196 {}
ysr@777 197 bool doHeapRegion(HeapRegion* hr) {
ysr@777 198 if (hr->isHumongous()) {
ysr@777 199 if (hr->startsHumongous()) {
ysr@777 200 oop obj = oop(hr->bottom());
ysr@777 201 if (obj->is_gc_marked()) {
ysr@777 202 obj->forward_to(obj);
ysr@777 203 } else {
ysr@777 204 free_humongous_region(hr);
ysr@777 205 }
ysr@777 206 } else {
ysr@777 207 assert(hr->continuesHumongous(), "Invalid humongous.");
ysr@777 208 }
ysr@777 209 } else {
ysr@777 210 hr->prepare_for_compaction(&_cp);
ysr@777 211 // Also clear the part of the card table that will be unused after
ysr@777 212 // compaction.
ysr@777 213 _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
ysr@777 214 }
ysr@777 215 return false;
ysr@777 216 }
ysr@777 217 };
apetrusenko@1112 218
apetrusenko@1112 219 // Finds the first HeapRegion.
ysr@777 220 class FindFirstRegionClosure: public HeapRegionClosure {
ysr@777 221 HeapRegion* _a_region;
ysr@777 222 public:
apetrusenko@1112 223 FindFirstRegionClosure() : _a_region(NULL) {}
ysr@777 224 bool doHeapRegion(HeapRegion* r) {
apetrusenko@1112 225 _a_region = r;
apetrusenko@1112 226 return true;
ysr@777 227 }
ysr@777 228 HeapRegion* result() { return _a_region; }
ysr@777 229 };
ysr@777 230
ysr@777 231 void G1MarkSweep::mark_sweep_phase2() {
ysr@777 232 // Now all live objects are marked, compute the new object addresses.
ysr@777 233
ysr@777 234 // It is imperative that we traverse perm_gen LAST. If dead space is
ysr@777 235 // allowed, a range of dead objects may get overwritten by a dead int
ysr@777 236 // array. If perm_gen is not traversed last a klassOop may get
ysr@777 237 // overwritten. This is fine since it is dead, but if the class has dead
ysr@777 238 // instances we have to skip them, and in order to find their size we
ysr@777 239 // need the klassOop!
ysr@777 240 //
ysr@777 241 // It is not required that we traverse spaces in the same order in
ysr@777 242 // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
ysr@777 243 // tracking expects us to do so. See comment under phase4.
ysr@777 244
ysr@777 245 G1CollectedHeap* g1h = G1CollectedHeap::heap();
ysr@777 246 Generation* pg = g1h->perm_gen();
ysr@777 247
ysr@777 248 EventMark m("2 compute new addresses");
ysr@777 249 TraceTime tm("phase 2", PrintGC && Verbose, true, gclog_or_tty);
ysr@777 250 GenMarkSweep::trace("2");
ysr@777 251
apetrusenko@1112 252 FindFirstRegionClosure cl;
ysr@777 253 g1h->heap_region_iterate(&cl);
ysr@777 254 HeapRegion *r = cl.result();
ysr@777 255 CompactibleSpace* sp = r;
ysr@777 256 if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
ysr@777 257 sp = r->next_compaction_space();
ysr@777 258 }
ysr@777 259
apetrusenko@1112 260 G1PrepareCompactClosure blk(sp);
ysr@777 261 g1h->heap_region_iterate(&blk);
ysr@777 262
ysr@777 263 CompactPoint perm_cp(pg, NULL, NULL);
ysr@777 264 pg->prepare_for_compaction(&perm_cp);
ysr@777 265 }
ysr@777 266
ysr@777 267 class G1AdjustPointersClosure: public HeapRegionClosure {
ysr@777 268 public:
ysr@777 269 bool doHeapRegion(HeapRegion* r) {
ysr@777 270 if (r->isHumongous()) {
ysr@777 271 if (r->startsHumongous()) {
ysr@777 272 // We must adjust the pointers on the single H object.
ysr@777 273 oop obj = oop(r->bottom());
ysr@777 274 debug_only(GenMarkSweep::track_interior_pointers(obj));
ysr@777 275 // point all the oops to the new location
ysr@777 276 obj->adjust_pointers();
ysr@777 277 debug_only(GenMarkSweep::check_interior_pointers());
ysr@777 278 }
ysr@777 279 } else {
ysr@777 280 // This really ought to be "as_CompactibleSpace"...
ysr@777 281 r->adjust_pointers();
ysr@777 282 }
ysr@777 283 return false;
ysr@777 284 }
ysr@777 285 };
ysr@777 286
ysr@777 287 void G1MarkSweep::mark_sweep_phase3() {
ysr@777 288 G1CollectedHeap* g1h = G1CollectedHeap::heap();
ysr@777 289 Generation* pg = g1h->perm_gen();
ysr@777 290
ysr@777 291 // Adjust the pointers to reflect the new locations
ysr@777 292 EventMark m("3 adjust pointers");
ysr@777 293 TraceTime tm("phase 3", PrintGC && Verbose, true, gclog_or_tty);
ysr@777 294 GenMarkSweep::trace("3");
ysr@777 295
ysr@777 296 SharedHeap* sh = SharedHeap::heap();
ysr@777 297
jrose@1424 298 sh->process_strong_roots(true, // activate StrongRootsScope
jrose@1424 299 true, // Collecting permanent generation.
ysr@777 300 SharedHeap::SO_AllClasses,
ysr@777 301 &GenMarkSweep::adjust_root_pointer_closure,
jrose@1424 302 NULL, // do not touch code cache here
ysr@777 303 &GenMarkSweep::adjust_pointer_closure);
ysr@777 304
ysr@777 305 g1h->ref_processor()->weak_oops_do(&GenMarkSweep::adjust_root_pointer_closure);
ysr@777 306
ysr@777 307 // Now adjust pointers in remaining weak roots. (All of which should
ysr@777 308 // have been cleared if they pointed to non-surviving objects.)
ysr@777 309 g1h->g1_process_weak_roots(&GenMarkSweep::adjust_root_pointer_closure,
ysr@777 310 &GenMarkSweep::adjust_pointer_closure);
ysr@777 311
ysr@777 312 GenMarkSweep::adjust_marks();
ysr@777 313
ysr@777 314 G1AdjustPointersClosure blk;
ysr@777 315 g1h->heap_region_iterate(&blk);
ysr@777 316 pg->adjust_pointers();
ysr@777 317 }
ysr@777 318
ysr@777 319 class G1SpaceCompactClosure: public HeapRegionClosure {
ysr@777 320 public:
ysr@777 321 G1SpaceCompactClosure() {}
ysr@777 322
ysr@777 323 bool doHeapRegion(HeapRegion* hr) {
ysr@777 324 if (hr->isHumongous()) {
ysr@777 325 if (hr->startsHumongous()) {
ysr@777 326 oop obj = oop(hr->bottom());
ysr@777 327 if (obj->is_gc_marked()) {
ysr@777 328 obj->init_mark();
ysr@777 329 } else {
ysr@777 330 assert(hr->is_empty(), "Should have been cleared in phase 2.");
ysr@777 331 }
ysr@777 332 hr->reset_during_compaction();
ysr@777 333 }
ysr@777 334 } else {
ysr@777 335 hr->compact();
ysr@777 336 }
ysr@777 337 return false;
ysr@777 338 }
ysr@777 339 };
ysr@777 340
ysr@777 341 void G1MarkSweep::mark_sweep_phase4() {
ysr@777 342 // All pointers are now adjusted, move objects accordingly
ysr@777 343
ysr@777 344 // It is imperative that we traverse perm_gen first in phase4. All
ysr@777 345 // classes must be allocated earlier than their instances, and traversing
ysr@777 346 // perm_gen first makes sure that all klassOops have moved to their new
ysr@777 347 // location before any instance does a dispatch through its klass!
ysr@777 348
ysr@777 349 // The ValidateMarkSweep live oops tracking expects us to traverse spaces
ysr@777 350 // in the same order in phase2, phase3 and phase4. We don't quite do that
ysr@777 351 // here (perm_gen first rather than last), so we tell the validate code
ysr@777 352 // to use a higher index (saved from phase2) when verifying perm_gen.
ysr@777 353 G1CollectedHeap* g1h = G1CollectedHeap::heap();
ysr@777 354 Generation* pg = g1h->perm_gen();
ysr@777 355
ysr@777 356 EventMark m("4 compact heap");
ysr@777 357 TraceTime tm("phase 4", PrintGC && Verbose, true, gclog_or_tty);
ysr@777 358 GenMarkSweep::trace("4");
ysr@777 359
ysr@777 360 pg->compact();
ysr@777 361
ysr@777 362 G1SpaceCompactClosure blk;
ysr@777 363 g1h->heap_region_iterate(&blk);
ysr@777 364
ysr@777 365 }
ysr@777 366
ysr@777 367 // Local Variables: ***
ysr@777 368 // c-indentation-style: gnu ***
ysr@777 369 // End: ***
