// Merge changeset: Thu, 21 Aug 2008 23:36:31 -0400
1 /*
2 * Copyright 1997-2008 Sun Microsystems, Inc. All Rights Reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
20 * CA 95054 USA or visit www.sun.com if you need additional information or
21 * have any questions.
22 *
23 */
25 # include "incls/_precompiled.incl"
26 # include "incls/_instanceRefKlass.cpp.incl"
// Serial MarkSweep marking of a java.lang.ref.Reference instance.
// If the referent is live, or the reference processor declines to discover
// this Reference, the referent is treated as an ordinary strong oop.
// If discovery succeeds, the referent is deliberately NOT marked here --
// it will be handled later during reference processing -- and only the
// Reference's regular instance fields are followed.
// T is the in-heap oop representation: narrowOop or oop.
template <class T>
static void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  oop referent = oopDesc::load_decode_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (referent != NULL) {
    // Only dead (unmarked) referents are candidates for discovery.
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      MarkSweep::mark_and_push(referent_addr);
    }
  }
  // treat next as normal oop. next is a link in the pending list.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  MarkSweep::mark_and_push(next_addr);
  // Follow the remaining (oop-map covered) instance fields of the Reference.
  ref->instanceKlass::oop_follow_contents(obj);
}
70 void instanceRefKlass::oop_follow_contents(oop obj) {
71 if (UseCompressedOops) {
72 specialized_oop_follow_contents<narrowOop>(this, obj);
73 } else {
74 specialized_oop_follow_contents<oop>(this, obj);
75 }
76 }
78 #ifndef SERIALGC
// Parallel-compact (PSParallelCompact) marking of a java.lang.ref.Reference.
// Mirrors the MarkSweep version above, but liveness is tested against the
// parallel collector's mark bitmap and work is pushed through the given
// ParCompactionManager. Discovered referents are skipped here and handled
// during reference processing; otherwise the referent is pushed as a normal
// strong oop. T is the in-heap oop representation: narrowOop or oop.
template <class T>
static void specialized_oop_follow_contents(instanceRefKlass* ref,
                                            ParCompactionManager* cm,
                                            oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  oop referent = oopDesc::load_decode_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (referent != NULL) {
    // Only referents not yet marked in the bitmap are discovery candidates.
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(cm, obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      PSParallelCompact::mark_and_push(cm, referent_addr);
    }
  }
  // treat next as normal oop. next is a link in the pending list.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  PSParallelCompact::mark_and_push(cm, next_addr);
  // Follow the remaining (oop-map covered) instance fields of the Reference.
  ref->instanceKlass::oop_follow_contents(cm, obj);
}
123 void instanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
124 oop obj) {
125 if (UseCompressedOops) {
126 specialized_oop_follow_contents<narrowOop>(this, cm, obj);
127 } else {
128 specialized_oop_follow_contents<oop>(this, cm, obj);
129 }
130 }
131 #endif // SERIALGC
#ifdef ASSERT
// Debug-only tracing helper. Prints, for Reference 'obj', the raw slot
// addresses of its referent/next/discovered fields together with the
// decoded oop currently stored in each (NULL for a NULL slot address).
// Emits output only when both TraceReferenceGC and PrintGCDetails are set.
// 's' is a caller-supplied label identifying the GC phase.
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if(TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
    gclog_or_tty->print_cr(" referent_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, referent_addr,
         referent_addr ?
           (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
    gclog_or_tty->print_cr(" next_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, next_addr,
         next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
    gclog_or_tty->print_cr(" discovered_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, discovered_addr,
         discovered_addr ?
           (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
  }
}
#endif
155 template <class T> void specialized_oop_adjust_pointers(instanceRefKlass *ref, oop obj) {
156 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
157 MarkSweep::adjust_pointer(referent_addr);
158 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
159 MarkSweep::adjust_pointer(next_addr);
160 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
161 MarkSweep::adjust_pointer(discovered_addr);
162 debug_only(trace_reference_gc("instanceRefKlass::oop_adjust_pointers", obj,
163 referent_addr, next_addr, discovered_addr);)
164 }
166 int instanceRefKlass::oop_adjust_pointers(oop obj) {
167 int size = size_helper();
168 instanceKlass::oop_adjust_pointers(obj);
170 if (UseCompressedOops) {
171 specialized_oop_adjust_pointers<narrowOop>(this, obj);
172 } else {
173 specialized_oop_adjust_pointers<oop>(this, obj);
174 }
175 return size;
176 }
// Common body for the instanceRefKlass::oop_oop_iterate* variants generated
// below. Applies 'closure' to the Reference-specific slots excluded from the
// regular oop map:
//   - discovered: only when the closure asks for it via
//     apply_to_weak_ref_discovered_field();
//   - referent:   skipped (early return) when the closure's reference
//     processor discovers this Reference; otherwise treated as a normal oop;
//   - next:       always treated as a normal oop.
// 'contains' is a predicate on slot addresses: the unbounded variants pass
// the always-true global contains() defined below, while the _m variants
// pass mr.contains to restrict iteration to a MemRegion.
// NOTE: this macro expands to statements that return 'size' from the
// enclosing generated function.
#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)      \
  if (closure->apply_to_weak_ref_discovered_field()) {                        \
    T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);         \
    closure->do_oop##nv_suffix(disc_addr);                                    \
  }                                                                           \
                                                                              \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);         \
  oop referent = oopDesc::load_decode_heap_oop(referent_addr);                \
  if (referent != NULL && contains(referent_addr)) {                          \
    ReferenceProcessor* rp = closure->_ref_processor;                         \
    if (!referent->is_gc_marked() && (rp != NULL) &&                          \
        rp->discover_reference(obj, reference_type())) {                      \
      return size;                                                            \
    } else {                                                                  \
      /* treat referent as normal oop */                                      \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr);                              \
    }                                                                         \
  }                                                                           \
  /* treat next as normal oop */                                              \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                 \
  if (contains(next_addr)) {                                                  \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
    closure->do_oop##nv_suffix(next_addr);                                    \
  }                                                                           \
  return size;                                                                \
206 template <class T> bool contains(T *t) { return true; }
// Macro to define instanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures. Macros calling macros above for each oop size.
// The generated function first iterates the oop-map-covered instance fields
// via instanceKlass, then handles the special Reference fields through
// InstanceRefKlass_SPECIALIZED_OOP_ITERATE, which supplies the return value.

#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)      \
                                                                              \
int instanceRefKlass::                                                        \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                \
  /* Get size before changing pointers */                                     \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                              \
  int size = instanceKlass::oop_oop_iterate##nv_suffix(obj, closure);         \
                                                                              \
  if (UseCompressedOops) {                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains); \
  } else {                                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);       \
  }                                                                           \
}
#ifndef SERIALGC
// Same shape as InstanceRefKlass_OOP_OOP_ITERATE_DEFN, but generates the
// oop_oop_iterate_backwards variants (non-serial collectors only). The
// instance fields are iterated in reverse via instanceKlass; the special
// Reference fields are then handled by the shared SPECIALIZED_OOP_ITERATE
// body, which supplies the return value.
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                              \
int instanceRefKlass::                                                        \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {      \
  /* Get size before changing pointers */                                     \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                              \
  int size = instanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                              \
  if (UseCompressedOops) {                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains); \
  } else {                                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);       \
  }                                                                           \
}
#endif // !SERIALGC
// Bounded (MemRegion-restricted) variant of the iterate definition macro:
// generates oop_oop_iterate##nv_suffix##_m, which only applies the closure
// to Reference slots whose address lies within 'mr' (mr.contains is passed
// as the region filter). Return value comes from SPECIALIZED_OOP_ITERATE.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)    \
                                                                              \
int instanceRefKlass::                                                        \
oop_oop_iterate##nv_suffix##_m(oop obj,                                       \
                               OopClosureType* closure,                       \
                               MemRegion mr) {                                \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                              \
  int size = instanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr); \
  if (UseCompressedOops) {                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
  } else {                                                                    \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);    \
  }                                                                           \
}
// Instantiate the generated iterate functions for every closure type listed
// in ALL_OOP_OOP_ITERATE_CLOSURES_1/2: the plain variants, the backwards
// variants (non-serial GC builds only), and the bounded _m variants.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
#ifndef SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
271 #ifndef SERIALGC
// Breadth-first parallel-scavenge (PSScavenge) treatment of a Reference.
// If the referent needs scavenging, offer the Reference for discovery; on
// success only the regular instance fields are processed (referent and next
// are dealt with during reference processing). Otherwise the referent, and
// unconditionally the next field, are claimed/forwarded as normal oops.
// Must only be called in breadth-first mode (see the assert).
template <class T>
void specialized_oop_copy_contents(instanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  assert(!pm->depth_first(), "invariant");
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->instanceKlass::oop_copy_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_breadth(referent_addr);
    }
  }
  // treat next as normal oop
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_breadth(next_addr);
  }
  // Process the remaining (oop-map covered) instance fields.
  ref->instanceKlass::oop_copy_contents(pm, obj);
}
296 void instanceRefKlass::oop_copy_contents(PSPromotionManager* pm, oop obj) {
297 if (UseCompressedOops) {
298 specialized_oop_copy_contents<narrowOop>(this, pm, obj);
299 } else {
300 specialized_oop_copy_contents<oop>(this, pm, obj);
301 }
302 }
// Depth-first parallel-scavenge (PSScavenge) treatment of a Reference.
// Identical policy to specialized_oop_copy_contents above, but work is
// claimed/forwarded depth-first. Must only be called in depth-first mode
// (see the assert).
template <class T>
void specialized_oop_push_contents(instanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  assert(pm->depth_first(), "invariant");
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->instanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // treat next as normal oop
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_depth(next_addr);
  }
  // Process the remaining (oop-map covered) instance fields.
  ref->instanceKlass::oop_push_contents(pm, obj);
}
328 void instanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
329 if (UseCompressedOops) {
330 specialized_oop_push_contents<narrowOop>(this, pm, obj);
331 } else {
332 specialized_oop_push_contents<oop>(this, pm, obj);
333 }
334 }
336 template <class T>
337 void specialized_oop_update_pointers(instanceRefKlass *ref,
338 ParCompactionManager* cm, oop obj) {
339 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
340 PSParallelCompact::adjust_pointer(referent_addr);
341 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
342 PSParallelCompact::adjust_pointer(next_addr);
343 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
344 PSParallelCompact::adjust_pointer(discovered_addr);
345 debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
346 referent_addr, next_addr, discovered_addr);)
347 }
349 int instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
350 instanceKlass::oop_update_pointers(cm, obj);
351 if (UseCompressedOops) {
352 specialized_oop_update_pointers<narrowOop>(this, cm, obj);
353 } else {
354 specialized_oop_update_pointers<oop>(this, cm, obj);
355 }
356 return size_helper();
357 }
360 template <class T> void
361 specialized_oop_update_pointers(ParCompactionManager* cm, oop obj,
362 HeapWord* beg_addr, HeapWord* end_addr) {
363 T* p;
364 T* referent_addr = p = (T*)java_lang_ref_Reference::referent_addr(obj);
365 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
366 T* next_addr = p = (T*)java_lang_ref_Reference::next_addr(obj);
367 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
368 T* discovered_addr = p = (T*)java_lang_ref_Reference::discovered_addr(obj);
369 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
370 debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
371 referent_addr, next_addr, discovered_addr);)
372 }
374 int
375 instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj,
376 HeapWord* beg_addr, HeapWord* end_addr) {
377 instanceKlass::oop_update_pointers(cm, obj, beg_addr, end_addr);
378 if (UseCompressedOops) {
379 specialized_oop_update_pointers<narrowOop>(cm, obj, beg_addr, end_addr);
380 } else {
381 specialized_oop_update_pointers<oop>(cm, obj, beg_addr, end_addr);
382 }
383 return size_helper();
384 }
385 #endif // SERIALGC
void instanceRefKlass::update_nonstatic_oop_maps(klassOop k) {
  // Clear the nonstatic oop-map entries corresponding to referent
  // and nextPending field. They are treated specially by the
  // garbage collector.
  // The discovered field is used only by the garbage collector
  // and is also treated specially.
  instanceKlass* ik = instanceKlass::cast(k);

  // Check that we have the right class: this must only ever run once, for
  // java.lang.ref.Reference itself.
  debug_only(static bool first_time = true);
  assert(k == SystemDictionary::reference_klass() && first_time,
         "Invalid update of maps");
  debug_only(first_time = false);
  assert(ik->nonstatic_oop_map_size() == 1, "just checking");

  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();

  // Check that the current map is (2,4) - currently points at field with
  // offset 2 (words) and has 4 map entries.
  debug_only(int offset = java_lang_ref_Reference::referent_offset);
  debug_only(int length = ((java_lang_ref_Reference::discovered_offset -
    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);

  if (UseSharedSpaces) {
    // NOTE(review): with shared spaces the map appears to have been rewritten
    // already (presumably at archive dump time) -- only verify here. Confirm.
    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
           map->length() == 1, "just checking");
  } else {
    assert(map->offset() == offset && map->length() == length,
           "just checking");

    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
    map->set_offset(java_lang_ref_Reference::queue_offset);
    map->set_length(1);
  }
}
424 // Verification
// Heap verification for a Reference instance. After the generic instance
// checks, the referent and next fields -- which are excluded from the oop
// map and therefore skipped by the regular verify pass -- get explicit
// is_oop checks and, for old-generation objects in a generational heap,
// explicit remembered-set checks via verify_old_oop.
void instanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
  instanceKlass::oop_verify_on(obj, st);
  // Verify referent field
  oop referent = java_lang_ref_Reference::referent(obj);

  // We should make this general to all heaps
  // Only a GenCollectedHeap supports the is_in_youngest query used below.
  GenCollectedHeap* gch = NULL;
  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
    gch = GenCollectedHeap::heap();

  if (referent != NULL) {
    guarantee(referent->is_oop(), "referent field heap failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the referent
      // field is not part of the oop mask and therefore skipped by the
      // regular verify code.
      if (UseCompressedOops) {
        narrowOop* referent_addr = (narrowOop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      } else {
        oop* referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      }
    }
  }
  // Verify next field
  oop next = java_lang_ref_Reference::next(obj);
  if (next != NULL) {
    guarantee(next->is_oop(), "next field verify failed");
    // next links Reference objects on the pending list, so it must itself
    // be a Reference instance.
    guarantee(next->is_instanceRef(), "next field verify failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the next field is
      // not part of the oop mask and therefore skipped by the regular
      // verify code.
      if (UseCompressedOops) {
        narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      } else {
        oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      }
    }
  }
}
// Acquire the java.lang.ref.Reference pending-list lock on behalf of the
// current thread, using the caller-supplied BasicLock as the lock record.
// Any pending exception is preserved across the (TRAPS-taking) monitor
// enter and no new exception is allowed to escape.
void instanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK; // exceptions are never thrown, needed for TRAPS argument
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Locking should have succeeded");
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}
// Release the pending-list lock previously acquired with
// acquire_pending_list_lock, first notifying all waiters if the pending
// list is non-empty (so ReferenceHandler threads wake up to process it).
// Any pending exception is preserved and none is allowed to escape.
void instanceRefKlass::release_and_notify_pending_list_lock(
  BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK; // exceptions are never thrown, needed for TRAPS argument
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  // The lock must still be held by this thread (paired with acquire above).
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Lock should be held");
  // Notify waiters on pending lists lock if there is any reference.
  if (java_lang_ref_Reference::pending_list() != NULL) {
    ObjectSynchronizer::notifyall(h_lock, THREAD);
  }
  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}