Tue, 11 May 2010 14:35:43 -0700
6931180: Migration to recent versions of MS Platform SDK
6951582: Build problems on win64
Summary: Changes to enable building JDK7 with Microsoft Visual Studio 2010
Reviewed-by: ohair, art, ccheung, dcubed
1 /*
2 * Copyright 2001-2010 Sun Microsystems, Inc. All Rights Reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
20 * CA 95054 USA or visit www.sun.com if you need additional information or
21 * have any questions.
22 *
23 */
25 #include "incls/_precompiled.incl"
26 #include "incls/_genMarkSweep.cpp.incl"
28 void GenMarkSweep::invoke_at_safepoint(int level, ReferenceProcessor* rp,
29 bool clear_all_softrefs) {
30 assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
32 GenCollectedHeap* gch = GenCollectedHeap::heap();
33 #ifdef ASSERT
34 if (gch->collector_policy()->should_clear_all_soft_refs()) {
35 assert(clear_all_softrefs, "Policy should have been checked earlier");
36 }
37 #endif
39 // hook up weak ref data so it can be used during Mark-Sweep
40 assert(ref_processor() == NULL, "no stomping");
41 assert(rp != NULL, "should be non-NULL");
42 _ref_processor = rp;
43 rp->setup_policy(clear_all_softrefs);
45 TraceTime t1("Full GC", PrintGC && !PrintGCDetails, true, gclog_or_tty);
47 // When collecting the permanent generation methodOops may be moving,
48 // so we either have to flush all bcp data or convert it into bci.
49 CodeCache::gc_prologue();
50 Threads::gc_prologue();
52 // Increment the invocation count for the permanent generation, since it is
53 // implicitly collected whenever we do a full mark sweep collection.
54 gch->perm_gen()->stat_record()->invocations++;
56 // Capture heap size before collection for printing.
57 size_t gch_prev_used = gch->used();
59 // Some of the card table updates below assume that the perm gen is
60 // also being collected.
61 assert(level == gch->n_gens() - 1,
62 "All generations are being collected, ergo perm gen too.");
64 // Capture used regions for each generation that will be
65 // subject to collection, so that card table adjustments can
66 // be made intelligently (see clear / invalidate further below).
67 gch->save_used_regions(level, true /* perm */);
69 allocate_stacks();
71 mark_sweep_phase1(level, clear_all_softrefs);
73 mark_sweep_phase2();
75 // Don't add any more derived pointers during phase3
76 COMPILER2_PRESENT(assert(DerivedPointerTable::is_active(), "Sanity"));
77 COMPILER2_PRESENT(DerivedPointerTable::set_active(false));
79 mark_sweep_phase3(level);
81 VALIDATE_MARK_SWEEP_ONLY(
82 if (ValidateMarkSweep) {
83 guarantee(_root_refs_stack->length() == 0, "should be empty by now");
84 }
85 )
87 mark_sweep_phase4();
89 VALIDATE_MARK_SWEEP_ONLY(
90 if (ValidateMarkSweep) {
91 guarantee(_live_oops->length() == _live_oops_moved_to->length(),
92 "should be the same size");
93 }
94 )
96 restore_marks();
98 // Set saved marks for allocation profiler (and other things? -- dld)
99 // (Should this be in general part?)
100 gch->save_marks();
102 deallocate_stacks();
104 // If compaction completely evacuated all generations younger than this
105 // one, then we can clear the card table. Otherwise, we must invalidate
106 // it (consider all cards dirty). In the future, we might consider doing
107 // compaction within generations only, and doing card-table sliding.
108 bool all_empty = true;
109 for (int i = 0; all_empty && i < level; i++) {
110 Generation* g = gch->get_gen(i);
111 all_empty = all_empty && gch->get_gen(i)->used() == 0;
112 }
113 GenRemSet* rs = gch->rem_set();
114 // Clear/invalidate below make use of the "prev_used_regions" saved earlier.
115 if (all_empty) {
116 // We've evacuated all generations below us.
117 Generation* g = gch->get_gen(level);
118 rs->clear_into_younger(g, true /* perm */);
119 } else {
120 // Invalidate the cards corresponding to the currently used
121 // region and clear those corresponding to the evacuated region
122 // of all generations just collected (i.e. level and younger).
123 rs->invalidate_or_clear(gch->get_gen(level),
124 true /* younger */,
125 true /* perm */);
126 }
128 Threads::gc_epilogue();
129 CodeCache::gc_epilogue();
131 if (PrintGC && !PrintGCDetails) {
132 gch->print_heap_change(gch_prev_used);
133 }
135 // refs processing: clean slate
136 _ref_processor = NULL;
138 // Update heap occupancy information which is used as
139 // input to soft ref clearing policy at the next gc.
140 Universe::update_heap_info_at_gc();
142 // Update time of last gc for all generations we collected
143 // (which curently is all the generations in the heap).
144 gch->update_time_of_last_gc(os::javaTimeMillis());
145 }
147 void GenMarkSweep::allocate_stacks() {
148 GenCollectedHeap* gch = GenCollectedHeap::heap();
149 // Scratch request on behalf of oldest generation; will do no
150 // allocation.
151 ScratchBlock* scratch = gch->gather_scratch(gch->_gens[gch->_n_gens-1], 0);
153 // $$$ To cut a corner, we'll only use the first scratch block, and then
154 // revert to malloc.
155 if (scratch != NULL) {
156 _preserved_count_max =
157 scratch->num_words * HeapWordSize / sizeof(PreservedMark);
158 } else {
159 _preserved_count_max = 0;
160 }
162 _preserved_marks = (PreservedMark*)scratch;
163 _preserved_count = 0;
164 _preserved_mark_stack = NULL;
165 _preserved_oop_stack = NULL;
167 _marking_stack = new (ResourceObj::C_HEAP) GrowableArray<oop>(4000, true);
168 _objarray_stack = new (ResourceObj::C_HEAP) GrowableArray<ObjArrayTask>(50, true);
170 int size = SystemDictionary::number_of_classes() * 2;
171 _revisit_klass_stack = new (ResourceObj::C_HEAP) GrowableArray<Klass*>(size, true);
172 // (#klass/k)^2 for k ~ 10 appears to be a better fit, but this will have to do for
173 // now until we have had a chance to investigate a more optimal setting.
174 _revisit_mdo_stack = new (ResourceObj::C_HEAP) GrowableArray<DataLayout*>(2*size, true);
176 #ifdef VALIDATE_MARK_SWEEP
177 if (ValidateMarkSweep) {
178 _root_refs_stack = new (ResourceObj::C_HEAP) GrowableArray<void*>(100, true);
179 _other_refs_stack = new (ResourceObj::C_HEAP) GrowableArray<void*>(100, true);
180 _adjusted_pointers = new (ResourceObj::C_HEAP) GrowableArray<void*>(100, true);
181 _live_oops = new (ResourceObj::C_HEAP) GrowableArray<oop>(100, true);
182 _live_oops_moved_to = new (ResourceObj::C_HEAP) GrowableArray<oop>(100, true);
183 _live_oops_size = new (ResourceObj::C_HEAP) GrowableArray<size_t>(100, true);
184 }
185 if (RecordMarkSweepCompaction) {
186 if (_cur_gc_live_oops == NULL) {
187 _cur_gc_live_oops = new(ResourceObj::C_HEAP) GrowableArray<HeapWord*>(100, true);
188 _cur_gc_live_oops_moved_to = new(ResourceObj::C_HEAP) GrowableArray<HeapWord*>(100, true);
189 _cur_gc_live_oops_size = new(ResourceObj::C_HEAP) GrowableArray<size_t>(100, true);
190 _last_gc_live_oops = new(ResourceObj::C_HEAP) GrowableArray<HeapWord*>(100, true);
191 _last_gc_live_oops_moved_to = new(ResourceObj::C_HEAP) GrowableArray<HeapWord*>(100, true);
192 _last_gc_live_oops_size = new(ResourceObj::C_HEAP) GrowableArray<size_t>(100, true);
193 } else {
194 _cur_gc_live_oops->clear();
195 _cur_gc_live_oops_moved_to->clear();
196 _cur_gc_live_oops_size->clear();
197 }
198 }
199 #endif
200 }
203 void GenMarkSweep::deallocate_stacks() {
204 if (!UseG1GC) {
205 GenCollectedHeap* gch = GenCollectedHeap::heap();
206 gch->release_scratch();
207 }
209 if (_preserved_oop_stack) {
210 delete _preserved_mark_stack;
211 _preserved_mark_stack = NULL;
212 delete _preserved_oop_stack;
213 _preserved_oop_stack = NULL;
214 }
216 delete _marking_stack;
217 delete _objarray_stack;
218 delete _revisit_klass_stack;
219 delete _revisit_mdo_stack;
221 #ifdef VALIDATE_MARK_SWEEP
222 if (ValidateMarkSweep) {
223 delete _root_refs_stack;
224 delete _other_refs_stack;
225 delete _adjusted_pointers;
226 delete _live_oops;
227 delete _live_oops_size;
228 delete _live_oops_moved_to;
229 _live_oops_index = 0;
230 _live_oops_index_at_perm = 0;
231 }
232 #endif
233 }
// Phase 1: mark all live objects.  Marks transitively from the strong roots,
// then processes discovered references, unloads dead classes and compiled
// code, prunes weak klass/MDO links, and unlinks dead symbol/string table
// entries.  The order of these steps matters: reference processing must
// precede class unloading, and class unloading must precede code-cache
// unloading (which consumes 'purged_class').
void GenMarkSweep::mark_sweep_phase1(int level,
  bool clear_all_softrefs) {
  // Recursively traverse all live objects and mark them
  EventMark m("1 mark object");
  TraceTime tm("phase 1", PrintGC && Verbose, true, gclog_or_tty);
  trace(" 1");

  VALIDATE_MARK_SWEEP_ONLY(reset_live_oop_tracking(false));

  GenCollectedHeap* gch = GenCollectedHeap::heap();

  // Because follow_root_closure is created statically, cannot
  // use OopsInGenClosure constructor which takes a generation,
  // as the Universe has not been created when the static constructors
  // are run.
  follow_root_closure.set_orig_generation(gch->get_gen(level));

  gch->gen_process_strong_roots(level,
                                false, // Younger gens are not roots.
                                true,  // activate StrongRootsScope
                                true,  // Collecting permanent generation.
                                SharedHeap::SO_SystemClasses,
                                &follow_root_closure,
                                true,  // walk code active on stacks
                                &follow_root_closure);

  // Process reference objects found during marking
  {
    ref_processor()->setup_policy(clear_all_softrefs);
    ref_processor()->process_discovered_references(
      &is_alive, &keep_alive, &follow_stack_closure, NULL);
  }

  // Follow system dictionary roots and unload classes
  bool purged_class = SystemDictionary::do_unloading(&is_alive);

  // Follow code cache roots
  CodeCache::do_unloading(&is_alive, &keep_alive, purged_class);
  follow_stack(); // Flush marking stack

  // Update subklass/sibling/implementor links of live klasses
  follow_weak_klass_links();
  assert(_marking_stack->is_empty(), "just drained");

  // Visit memoized MDO's and clear any unmarked weak refs
  follow_mdo_weak_refs();
  assert(_marking_stack->is_empty(), "just drained");

  // Visit symbol and interned string tables and delete unmarked oops
  SymbolTable::unlink(&is_alive);
  StringTable::unlink(&is_alive);

  assert(_marking_stack->is_empty(), "stack should be empty by now");
}
// Phase 2: compute the new (post-compaction) address of every live object.
void GenMarkSweep::mark_sweep_phase2() {
  // Now all live objects are marked, compute the new object addresses.

  // It is imperative that we traverse perm_gen LAST. If dead space is
  // allowed a range of dead object may get overwritten by a dead int
  // array. If perm_gen is not traversed last a klassOop may get
  // overwritten. This is fine since it is dead, but if the class has dead
  // instances we have to skip them, and in order to find their size we
  // need the klassOop!
  //
  // It is not required that we traverse spaces in the same order in
  // phase2, phase3 and phase4, but the ValidateMarkSweep live oops
  // tracking expects us to do so. See comment under phase4.

  GenCollectedHeap* gch = GenCollectedHeap::heap();
  Generation* pg = gch->perm_gen();

  EventMark m("2 compute new addresses");
  TraceTime tm("phase 2", PrintGC && Verbose, true, gclog_or_tty);
  trace("2");

  VALIDATE_MARK_SWEEP_ONLY(reset_live_oop_tracking(false));

  // Forward all ordinary generations first...
  gch->prepare_for_compaction();

  // ...then the perm gen (see "traverse perm_gen LAST" above).
  VALIDATE_MARK_SWEEP_ONLY(_live_oops_index_at_perm = _live_oops_index);
  CompactPoint perm_cp(pg, NULL, NULL);
  pg->prepare_for_compaction(&perm_cp);
}
321 class GenAdjustPointersClosure: public GenCollectedHeap::GenClosure {
322 public:
323 void do_generation(Generation* gen) {
324 gen->adjust_pointers();
325 }
326 };
// Phase 3: adjust every pointer (roots, weak roots, and interior pointers in
// all generations plus the perm gen) to refer to the forwarding addresses
// computed in phase 2.  Note the ordering constraint: pre_adjust_pointers()
// on the perm gen must run before the system dictionary is adjusted.
void GenMarkSweep::mark_sweep_phase3(int level) {
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  Generation* pg = gch->perm_gen();

  // Adjust the pointers to reflect the new locations
  EventMark m("3 adjust pointers");
  TraceTime tm("phase 3", PrintGC && Verbose, true, gclog_or_tty);
  trace("3");

  VALIDATE_MARK_SWEEP_ONLY(reset_live_oop_tracking(false));

  // Needs to be done before the system dictionary is adjusted.
  pg->pre_adjust_pointers();

  // Because the two closures below are created statically, cannot
  // use OopsInGenClosure constructor which takes a generation,
  // as the Universe has not been created when the static constructors
  // are run.
  adjust_root_pointer_closure.set_orig_generation(gch->get_gen(level));
  adjust_pointer_closure.set_orig_generation(gch->get_gen(level));

  gch->gen_process_strong_roots(level,
                                false, // Younger gens are not roots.
                                true,  // activate StrongRootsScope
                                true,  // Collecting permanent generation.
                                SharedHeap::SO_AllClasses,
                                &adjust_root_pointer_closure,
                                false, // do not walk code
                                &adjust_root_pointer_closure);

  // Now adjust pointers in remaining weak roots. (All of which should
  // have been cleared if they pointed to non-surviving objects.)
  CodeBlobToOopClosure adjust_code_pointer_closure(&adjust_pointer_closure,
                                                   /*do_marking=*/ false);
  gch->gen_process_weak_roots(&adjust_root_pointer_closure,
                              &adjust_code_pointer_closure,
                              &adjust_pointer_closure);

  // Finally adjust saved marks and the interior pointers of every
  // generation, including the perm gen.
  adjust_marks();
  GenAdjustPointersClosure blk;
  gch->generation_iterate(&blk, true);
  pg->adjust_pointers();
}
372 class GenCompactClosure: public GenCollectedHeap::GenClosure {
373 public:
374 void do_generation(Generation* gen) {
375 gen->compact();
376 }
377 };
// Phase 4: move all objects to their new locations.  The perm gen is
// compacted FIRST (see comment below) so that klassOops reach their final
// addresses before any instance dispatches through its klass.
void GenMarkSweep::mark_sweep_phase4() {
  // All pointers are now adjusted, move objects accordingly

  // It is imperative that we traverse perm_gen first in phase4. All
  // classes must be allocated earlier than their instances, and traversing
  // perm_gen first makes sure that all klassOops have moved to their new
  // location before any instance does a dispatch through it's klass!

  // The ValidateMarkSweep live oops tracking expects us to traverse spaces
  // in the same order in phase2, phase3 and phase4. We don't quite do that
  // here (perm_gen first rather than last), so we tell the validate code
  // to use a higher index (saved from phase2) when verifying perm_gen.
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  Generation* pg = gch->perm_gen();

  EventMark m("4 compact heap");
  TraceTime tm("phase 4", PrintGC && Verbose, true, gclog_or_tty);
  trace("4");

  // 'true' here makes the validate code use the perm-gen index saved in
  // phase 2, since perm gen is visited out of phase-2 order.
  VALIDATE_MARK_SWEEP_ONLY(reset_live_oop_tracking(true));

  pg->compact();

  VALIDATE_MARK_SWEEP_ONLY(reset_live_oop_tracking(false));

  GenCompactClosure blk;
  gch->generation_iterate(&blk, true);

  VALIDATE_MARK_SWEEP_ONLY(compaction_complete());

  pg->post_compact(); // Shared spaces verification.
}