Tue, 05 May 2009 22:15:35 -0700
6833576: G1: assert illegal index, growableArray.hpp:186
Summary: The code that calculates the heap region index for an object address incorrectly used signed arithmetic.
Reviewed-by: jcoomes, ysr
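The summary above points at a signed-arithmetic bug in the calculation that maps an object address to a heap region index. Below is a minimal sketch of that failure mode; the names (region_index_signed, region_index_unsigned, LogOfHRGrainBytes, heap_base) are illustrative assumptions, not the HotSpot code touched by this change. Narrowing the pointer delta to a signed int can overflow for heaps larger than 2 GB and yield a negative index, which is exactly the kind of value a GrowableArray bounds assert rejects; keeping the delta in an unsigned, pointer-sized type avoids that.

// Illustrative sketch only -- not the actual HotSpot fix.
#include <stddef.h>

const int LogOfHRGrainBytes = 20;  // assume 1 MB heap regions for the example

// Buggy flavor: the pointer delta is narrowed to a signed int. For an
// object more than 2 GB above heap_base the delta overflows, and the
// resulting index can be negative or simply wrong.
int region_index_signed(const void* addr, const void* heap_base) {
  int delta = (int)((const char*)addr - (const char*)heap_base);
  return delta >> LogOfHRGrainBytes;
}

// Fixed flavor: keep the delta in an unsigned, pointer-sized type so the
// index stays non-negative and does not wrap on large heaps.
size_t region_index_unsigned(const void* addr, const void* heap_base) {
  size_t delta = (size_t)((const char*)addr - (const char*)heap_base);
  return delta >> LogOfHRGrainBytes;
}

Under that assumption, addresses high in a large heap map to a valid non-negative index instead of tripping the assert.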
/*
 * Copyright 2001-2008 Sun Microsystems, Inc. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */
#include "incls/_precompiled.incl"
#include "incls/_g1MarkSweep.cpp.incl"

class HeapRegion;

void G1MarkSweep::invoke_at_safepoint(ReferenceProcessor* rp,
                                      bool clear_all_softrefs) {
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");

  // hook up weak ref data so it can be used during Mark-Sweep
  assert(GenMarkSweep::ref_processor() == NULL, "no stomping");
  assert(rp != NULL, "should be non-NULL");
  GenMarkSweep::_ref_processor = rp;
  rp->setup_policy(clear_all_softrefs);

  // When collecting the permanent generation methodOops may be moving,
  // so we either have to flush all bcp data or convert it into bci.
  CodeCache::gc_prologue();
  Threads::gc_prologue();

  // Increment the invocation count for the permanent generation, since it is
  // implicitly collected whenever we do a full mark sweep collection.
  SharedHeap* sh = SharedHeap::heap();
  sh->perm_gen()->stat_record()->invocations++;

  bool marked_for_unloading = false;

  allocate_stacks();

  // We should save the marks of the currently locked biased monitors.
  // The marking doesn't preserve the marks of biased objects.
  BiasedLocking::preserve_marks();

  mark_sweep_phase1(marked_for_unloading, clear_all_softrefs);

  if (VerifyDuringGC) {
    G1CollectedHeap* g1h = G1CollectedHeap::heap();
    g1h->checkConcurrentMark();
  }

  mark_sweep_phase2();

  // Don't add any more derived pointers during phase3
  COMPILER2_PRESENT(DerivedPointerTable::set_active(false));

  mark_sweep_phase3();

  mark_sweep_phase4();

  GenMarkSweep::restore_marks();
  BiasedLocking::restore_marks();
  GenMarkSweep::deallocate_stacks();

  // We must invalidate the perm-gen rs, so that it gets rebuilt.
  GenRemSet* rs = sh->rem_set();
  rs->invalidate(sh->perm_gen()->used_region(), true /*whole_heap*/);

  // "free at last gc" is calculated from these.
  // CHF: cheating for now!!!
  // Universe::set_heap_capacity_at_last_gc(Universe::heap()->capacity());
  // Universe::set_heap_used_at_last_gc(Universe::heap()->used());

  Threads::gc_epilogue();
  CodeCache::gc_epilogue();

  // refs processing: clean slate
  GenMarkSweep::_ref_processor = NULL;
}
void G1MarkSweep::allocate_stacks() {
  GenMarkSweep::_preserved_count_max = 0;
  GenMarkSweep::_preserved_marks = NULL;
  GenMarkSweep::_preserved_count = 0;
  GenMarkSweep::_preserved_mark_stack = NULL;
  GenMarkSweep::_preserved_oop_stack = NULL;

  GenMarkSweep::_marking_stack =
    new (ResourceObj::C_HEAP) GrowableArray<oop>(4000, true);

  size_t size = SystemDictionary::number_of_classes() * 2;
  GenMarkSweep::_revisit_klass_stack =
    new (ResourceObj::C_HEAP) GrowableArray<Klass*>((int)size, true);
}
void G1MarkSweep::mark_sweep_phase1(bool& marked_for_unloading,
                                    bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  EventMark m("1 mark object");
  TraceTime tm("phase 1", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace(" 1");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // Collecting permanent generation.
                           SharedHeap::SO_SystemClasses,
                           &GenMarkSweep::follow_root_closure,
                           &GenMarkSweep::follow_root_closure);

  // Process reference objects found during marking
  ReferenceProcessor* rp = GenMarkSweep::ref_processor();
  rp->setup_policy(clear_all_softrefs);
  rp->process_discovered_references(&GenMarkSweep::is_alive,
                                    &GenMarkSweep::keep_alive,
                                    &GenMarkSweep::follow_stack_closure,
                                    NULL);

  // Follow system dictionary roots and unload classes
  bool purged_class = SystemDictionary::do_unloading(&GenMarkSweep::is_alive);
  assert(GenMarkSweep::_marking_stack->is_empty(),
         "stack should be empty by now");

  // Follow code cache roots (has to be done after system dictionary,
  // assumes all live klasses are marked)
  CodeCache::do_unloading(&GenMarkSweep::is_alive,
                          &GenMarkSweep::keep_alive,
                          purged_class);
  GenMarkSweep::follow_stack();

  // Update subklass/sibling/implementor links of live klasses
  GenMarkSweep::follow_weak_klass_links();
  assert(GenMarkSweep::_marking_stack->is_empty(),
         "stack should be empty by now");

  // Visit symbol and interned string tables and delete unmarked oops
  SymbolTable::unlink(&GenMarkSweep::is_alive);
  StringTable::unlink(&GenMarkSweep::is_alive);

  assert(GenMarkSweep::_marking_stack->is_empty(),
         "stack should be empty by now");
}
class G1PrepareCompactClosure: public HeapRegionClosure {
  ModRefBarrierSet* _mrbs;
  CompactPoint _cp;

  void free_humongous_region(HeapRegion* hr) {
    HeapWord* bot = hr->bottom();
    HeapWord* end = hr->end();
    assert(hr->startsHumongous(),
           "Only the start of a humongous region should be freed.");
    G1CollectedHeap::heap()->free_region(hr);
    hr->prepare_for_compaction(&_cp);
    // Also clear the part of the card table that will be unused after
    // compaction.
    _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
  }

public:
  G1PrepareCompactClosure(CompactibleSpace* cs) :
    _cp(NULL, cs, cs->initialize_threshold()),
    _mrbs(G1CollectedHeap::heap()->mr_bs())
  {}
  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          obj->forward_to(obj);
        } else {
          free_humongous_region(hr);
        }
      } else {
        assert(hr->continuesHumongous(), "Invalid humongous.");
      }
    } else {
      hr->prepare_for_compaction(&_cp);
      // Also clear the part of the card table that will be unused after
      // compaction.
      _mrbs->clear(MemRegion(hr->compaction_top(), hr->end()));
    }
    return false;
  }
};

// Finds the first HeapRegion.
class FindFirstRegionClosure: public HeapRegionClosure {
  HeapRegion* _a_region;
public:
  FindFirstRegionClosure() : _a_region(NULL) {}
  bool doHeapRegion(HeapRegion* r) {
    _a_region = r;
    return true;
  }
  HeapRegion* result() { return _a_region; }
};
void G1MarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is imperative that we traverse perm_gen LAST. If dead space is
  // allowed, a range of dead objects may get overwritten by a dead int
  // array. If perm_gen is not traversed last a klassOop may get
  // overwritten. This is fine since it is dead, but if the class has dead
  // instances we have to skip them, and in order to find their size we
  // need the klassOop!
  //
  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  EventMark m("2 compute new addresses");
  TraceTime tm("phase 2", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("2");

  FindFirstRegionClosure cl;
  g1h->heap_region_iterate(&cl);
  HeapRegion* r = cl.result();
  CompactibleSpace* sp = r;
  if (r->isHumongous() && oop(r->bottom())->is_gc_marked()) {
    sp = r->next_compaction_space();
  }

  G1PrepareCompactClosure blk(sp);
  g1h->heap_region_iterate(&blk);

  CompactPoint perm_cp(pg, NULL, NULL);
  pg->prepare_for_compaction(&perm_cp);
}
class G1AdjustPointersClosure: public HeapRegionClosure {
public:
  bool doHeapRegion(HeapRegion* r) {
    if (r->isHumongous()) {
      if (r->startsHumongous()) {
        // We must adjust the pointers on the single H object.
        oop obj = oop(r->bottom());
        debug_only(GenMarkSweep::track_interior_pointers(obj));
        // point all the oops to the new location
        obj->adjust_pointers();
        debug_only(GenMarkSweep::check_interior_pointers());
      }
    } else {
      // This really ought to be "as_CompactibleSpace"...
      r->adjust_pointers();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase3() {
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  // Adjust the pointers to reflect the new locations
  EventMark m("3 adjust pointers");
  TraceTime tm("phase 3", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("3");

  SharedHeap* sh = SharedHeap::heap();

  sh->process_strong_roots(true,  // Collecting permanent generation.
                           SharedHeap::SO_AllClasses,
                           &GenMarkSweep::adjust_root_pointer_closure,
                           &GenMarkSweep::adjust_pointer_closure);

  g1h->ref_processor()->weak_oops_do(&GenMarkSweep::adjust_root_pointer_closure);

  // Now adjust pointers in remaining weak roots. (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  g1h->g1_process_weak_roots(&GenMarkSweep::adjust_root_pointer_closure,
                             &GenMarkSweep::adjust_pointer_closure);

  GenMarkSweep::adjust_marks();

  G1AdjustPointersClosure blk;
  g1h->heap_region_iterate(&blk);
  pg->adjust_pointers();
}
class G1SpaceCompactClosure: public HeapRegionClosure {
public:
  G1SpaceCompactClosure() {}

  bool doHeapRegion(HeapRegion* hr) {
    if (hr->isHumongous()) {
      if (hr->startsHumongous()) {
        oop obj = oop(hr->bottom());
        if (obj->is_gc_marked()) {
          obj->init_mark();
        } else {
          assert(hr->is_empty(), "Should have been cleared in phase 2.");
        }
        hr->reset_during_compaction();
      }
    } else {
      hr->compact();
    }
    return false;
  }
};

void G1MarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // It is imperative that we traverse perm_gen first in phase4. All
  // classes must be allocated earlier than their instances, and traversing
  // perm_gen first makes sure that all klassOops have moved to their new
  // location before any instance does a dispatch through its klass!

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (perm_gen first rather than last), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  G1CollectedHeap* g1h = G1CollectedHeap::heap();
  Generation* pg = g1h->perm_gen();

  EventMark m("4 compact heap");
  TraceTime tm("phase 4", PrintGC && Verbose, true, gclog_or_tty);
  GenMarkSweep::trace("4");

  pg->compact();

  G1SpaceCompactClosure blk;
  g1h->heap_region_iterate(&blk);
}
// Local Variables: ***
// c-indentation-style: gnu ***
// End: ***