Tue, 22 Sep 2009 14:06:10 -0700
6884624: Update copyright year
Summary: Update copyright for files that have been modified in 2009 through Septermber
Reviewed-by: tbell, ohair
1 /*
2 * Copyright 1997-2009 Sun Microsystems, Inc. All Rights Reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
20 * CA 95054 USA or visit www.sun.com if you need additional information or
21 * have any questions.
22 *
23 */
25 # include "incls/_precompiled.incl"
26 # include "incls/_instanceRefKlass.cpp.incl"
28 template <class T>
29 static void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
30 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
31 T heap_oop = oopDesc::load_heap_oop(referent_addr);
32 debug_only(
33 if(TraceReferenceGC && PrintGCDetails) {
34 gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
35 }
36 )
37 if (!oopDesc::is_null(heap_oop)) {
38 oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
39 if (!referent->is_gc_marked() &&
40 MarkSweep::ref_processor()->
41 discover_reference(obj, ref->reference_type())) {
42 // reference already enqueued, referent will be traversed later
43 ref->instanceKlass::oop_follow_contents(obj);
44 debug_only(
45 if(TraceReferenceGC && PrintGCDetails) {
46 gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
47 }
48 )
49 return;
50 } else {
51 // treat referent as normal oop
52 debug_only(
53 if(TraceReferenceGC && PrintGCDetails) {
54 gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
55 }
56 )
57 MarkSweep::mark_and_push(referent_addr);
58 }
59 }
60 // treat next as normal oop. next is a link in the pending list.
61 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
62 debug_only(
63 if(TraceReferenceGC && PrintGCDetails) {
64 gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr);
65 }
66 )
67 MarkSweep::mark_and_push(next_addr);
68 ref->instanceKlass::oop_follow_contents(obj);
69 }
71 void instanceRefKlass::oop_follow_contents(oop obj) {
72 if (UseCompressedOops) {
73 specialized_oop_follow_contents<narrowOop>(this, obj);
74 } else {
75 specialized_oop_follow_contents<oop>(this, obj);
76 }
77 }
79 #ifndef SERIALGC
// ParallelCompact (parallel full GC) marking of a java.lang.ref.Reference
// instance.  Mirrors the MarkSweep variant above, but tests markedness via
// PSParallelCompact's mark bitmap and pushes through the compaction manager.
template <class T>
static void specialized_oop_follow_contents(instanceRefKlass* ref,
                                            ParCompactionManager* cm,
                                            oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    // Skip marking the referent when the reference processor discovers
    // this reference; it will be traversed during reference processing.
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(cm, obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      PSParallelCompact::mark_and_push(cm, referent_addr);
    }
  }
  // treat next as normal oop. next is a link in the pending list.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  PSParallelCompact::mark_and_push(cm, next_addr);
  // Finally mark the regular instance fields via the instanceKlass walker.
  ref->instanceKlass::oop_follow_contents(cm, obj);
}
125 void instanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
126 oop obj) {
127 if (UseCompressedOops) {
128 specialized_oop_follow_contents<narrowOop>(this, cm, obj);
129 } else {
130 specialized_oop_follow_contents<oop>(this, cm, obj);
131 }
132 }
133 #endif // SERIALGC
135 #ifdef ASSERT
// Debug-only tracing helper: for each of the three special Reference slots
// (referent, next, discovered) print the slot address and the decoded oop
// stored there (NULL is printed when the slot pointer itself is NULL).
// 's' labels the calling phase in the output.
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if(TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
    gclog_or_tty->print_cr(" referent_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, referent_addr,
         referent_addr ?
         (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
    gclog_or_tty->print_cr(" next_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, next_addr,
         next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
    gclog_or_tty->print_cr(" discovered_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, discovered_addr,
         discovered_addr ?
         (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
  }
}
155 #endif
157 template <class T> void specialized_oop_adjust_pointers(instanceRefKlass *ref, oop obj) {
158 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
159 MarkSweep::adjust_pointer(referent_addr);
160 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
161 MarkSweep::adjust_pointer(next_addr);
162 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
163 MarkSweep::adjust_pointer(discovered_addr);
164 debug_only(trace_reference_gc("instanceRefKlass::oop_adjust_pointers", obj,
165 referent_addr, next_addr, discovered_addr);)
166 }
168 int instanceRefKlass::oop_adjust_pointers(oop obj) {
169 int size = size_helper();
170 instanceKlass::oop_adjust_pointers(obj);
172 if (UseCompressedOops) {
173 specialized_oop_adjust_pointers<narrowOop>(this, obj);
174 } else {
175 specialized_oop_adjust_pointers<oop>(this, obj);
176 }
177 return size;
178 }
// Shared body for the oop_oop_iterate variants over a Reference instance.
// Applies 'closure' to the discovered field (only if the closure asks for
// it), then to the referent -- unless the closure's reference processor
// discovers this reference first, in which case the referent is skipped --
// and finally to the 'next' field.  'contains' filters slots by address:
// the global always-true predicate for unbounded iteration, mr.contains
// for the bounded _m variants.  Expands inside a function with 'size' and
// 'obj' in scope and exits that function via 'return size'.
#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)         \
  if (closure->apply_to_weak_ref_discovered_field()) {                           \
    T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);            \
    closure->do_oop##nv_suffix(disc_addr);                                       \
  }                                                                              \
                                                                                 \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);            \
  T heap_oop = oopDesc::load_heap_oop(referent_addr);                            \
  if (!oopDesc::is_null(heap_oop) && contains(referent_addr)) {                  \
    ReferenceProcessor* rp = closure->_ref_processor;                            \
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);                  \
    if (!referent->is_gc_marked() && (rp != NULL) &&                             \
        rp->discover_reference(obj, reference_type())) {                         \
      return size;                                                               \
    } else {                                                                     \
      /* treat referent as normal oop */                                         \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr);                                 \
    }                                                                            \
  }                                                                              \
  /* treat next as normal oop */                                                 \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                    \
  if (contains(next_addr)) {                                                     \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
    closure->do_oop##nv_suffix(next_addr);                                       \
  }                                                                              \
  return size;                                                                   \

// Unbounded "contains" predicate: for whole-heap iteration every slot
// address is considered in range, so this always answers true.  Passed to
// InstanceRefKlass_SPECIALIZED_OOP_ITERATE in place of MemRegion::contains.
template <class T> bool contains(T *addr) { return true; }
211 // Macro to define instanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
212 // all closures. Macros calling macros above for each oop size.
// Defines instanceRefKlass::oop_oop_iterate##nv_suffix for one closure
// type: iterate the mapped instance fields via instanceKlass, then handle
// the special Reference fields.  The 'return size' lives inside
// InstanceRefKlass_SPECIALIZED_OOP_ITERATE.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
230 #ifndef SERIALGC
// Same as InstanceRefKlass_OOP_OOP_ITERATE_DEFN, but defines the
// oop_oop_iterate_backwards##nv_suffix variant (only compiled when
// SERIALGC is not defined).
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {        \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
246 #endif // !SERIALGC
// Defines the MemRegion-bounded oop_oop_iterate##nv_suffix##_m variant:
// identical to the unbounded form except slots are filtered with
// mr.contains so only pointers inside 'mr' are passed to the closure.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
                               OopClosureType* closure,                         \
                               MemRegion mr) {                                  \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains);\
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
  }                                                                             \
}
// Instantiate the iterate methods for every closure type in the two
// ALL_OOP_OOP_ITERATE_CLOSURES lists; the backwards variants are only
// generated when SERIALGC is not defined.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
#ifndef SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
274 #ifndef SERIALGC
// Breadth-first scavenge of a Reference instance (non-depth-first
// PSPromotionManager).  If the referent needs scavenging and the reference
// processor discovers this reference, the referent is left for reference
// processing; otherwise it is claimed like a normal field.  The 'next'
// field and the regular instance fields are always processed.
template <class T>
void specialized_oop_copy_contents(instanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  assert(!pm->depth_first(), "invariant");
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->instanceKlass::oop_copy_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_breadth(referent_addr);
    }
  }
  // treat next as normal oop
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_breadth(next_addr);
  }
  // Process the regular instance fields via the instanceKlass walker.
  ref->instanceKlass::oop_copy_contents(pm, obj);
}
299 void instanceRefKlass::oop_copy_contents(PSPromotionManager* pm, oop obj) {
300 if (UseCompressedOops) {
301 specialized_oop_copy_contents<narrowOop>(this, pm, obj);
302 } else {
303 specialized_oop_copy_contents<oop>(this, pm, obj);
304 }
305 }
// Depth-first scavenge of a Reference instance (depth-first
// PSPromotionManager).  Same structure as the breadth-first variant above,
// but slots are claimed with claim_or_forward_depth.
template <class T>
void specialized_oop_push_contents(instanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  assert(pm->depth_first(), "invariant");
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->instanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // treat next as normal oop
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_depth(next_addr);
  }
  // Process the regular instance fields via the instanceKlass walker.
  ref->instanceKlass::oop_push_contents(pm, obj);
}
331 void instanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
332 if (UseCompressedOops) {
333 specialized_oop_push_contents<narrowOop>(this, pm, obj);
334 } else {
335 specialized_oop_push_contents<oop>(this, pm, obj);
336 }
337 }
339 template <class T>
340 void specialized_oop_update_pointers(instanceRefKlass *ref,
341 ParCompactionManager* cm, oop obj) {
342 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
343 PSParallelCompact::adjust_pointer(referent_addr);
344 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
345 PSParallelCompact::adjust_pointer(next_addr);
346 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
347 PSParallelCompact::adjust_pointer(discovered_addr);
348 debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
349 referent_addr, next_addr, discovered_addr);)
350 }
352 int instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
353 instanceKlass::oop_update_pointers(cm, obj);
354 if (UseCompressedOops) {
355 specialized_oop_update_pointers<narrowOop>(this, cm, obj);
356 } else {
357 specialized_oop_update_pointers<oop>(this, cm, obj);
358 }
359 return size_helper();
360 }
363 template <class T> void
364 specialized_oop_update_pointers(ParCompactionManager* cm, oop obj,
365 HeapWord* beg_addr, HeapWord* end_addr) {
366 T* p;
367 T* referent_addr = p = (T*)java_lang_ref_Reference::referent_addr(obj);
368 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
369 T* next_addr = p = (T*)java_lang_ref_Reference::next_addr(obj);
370 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
371 T* discovered_addr = p = (T*)java_lang_ref_Reference::discovered_addr(obj);
372 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
373 debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
374 referent_addr, next_addr, discovered_addr);)
375 }
377 int
378 instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj,
379 HeapWord* beg_addr, HeapWord* end_addr) {
380 instanceKlass::oop_update_pointers(cm, obj, beg_addr, end_addr);
381 if (UseCompressedOops) {
382 specialized_oop_update_pointers<narrowOop>(cm, obj, beg_addr, end_addr);
383 } else {
384 specialized_oop_update_pointers<oop>(cm, obj, beg_addr, end_addr);
385 }
386 return size_helper();
387 }
388 #endif // SERIALGC
// One-time fixup of java.lang.ref.Reference's nonstatic oop map so the GC
// skips the special fields.
void instanceRefKlass::update_nonstatic_oop_maps(klassOop k) {
  // Clear the nonstatic oop-map entries corresponding to referent
  // and nextPending field. They are treated specially by the
  // garbage collector.
  // The discovered field is used only by the garbage collector
  // and is also treated specially.
  instanceKlass* ik = instanceKlass::cast(k);

  // Check that we have the right class: must be java.lang.ref.Reference
  // itself, and this must only be called once (first_time guard is
  // debug-only, checked by the assert below).
  debug_only(static bool first_time = true);
  assert(k == SystemDictionary::reference_klass() && first_time,
         "Invalid update of maps");
  debug_only(first_time = false);
  assert(ik->nonstatic_oop_map_count() == 1, "just checking");

  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();

  // Check that the current map is (2,4) - currently points at field with
  // offset 2 (words) and has 4 map entries.
  debug_only(int offset = java_lang_ref_Reference::referent_offset);
  debug_only(unsigned int count = ((java_lang_ref_Reference::discovered_offset -
    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);

  if (UseSharedSpaces) {
    // Shared (CDS) case: the map was already rewritten, only verify it.
    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
           map->count() == 1, "just checking");
  } else {
    assert(map->offset() == offset && map->count() == count,
           "just checking");

    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
    map->set_offset(java_lang_ref_Reference::queue_offset);
    map->set_count(1);
  }
}
427 // Verification
// Verify a Reference instance: the regular fields via instanceKlass, then
// the referent and next fields, which are not in the oop map and therefore
// need explicit checks (including a remembered-set check for old objects
// in a generational heap).
void instanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
  instanceKlass::oop_verify_on(obj, st);
  // Verify referent field
  oop referent = java_lang_ref_Reference::referent(obj);

  // We should make this general to all heaps
  GenCollectedHeap* gch = NULL;
  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
    gch = GenCollectedHeap::heap();

  if (referent != NULL) {
    guarantee(referent->is_oop(), "referent field heap failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the referent
      // field is not part of the oop mask and therefore skipped by the
      // regular verify code.
      if (UseCompressedOops) {
        narrowOop* referent_addr = (narrowOop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      } else {
        oop* referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      }
    }
  }
  // Verify next field
  oop next = java_lang_ref_Reference::next(obj);
  if (next != NULL) {
    guarantee(next->is_oop(), "next field verify failed");
    // A non-null next link must itself be a Reference instance.
    guarantee(next->is_instanceRef(), "next field verify failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the next field is
      // not part of the oop mask and therefore skipped by the regular
      // verify code.
      if (UseCompressedOops) {
        narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      } else {
        oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      }
    }
  }
}
// Acquire the java.lang.ref pending-list lock via the caller-supplied
// BasicLock.  May be entered with a pending exception; the exception mark
// preserves it and any exception raised here is cleared before returning.
void instanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Locking should have succeeded");
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}
// Notify waiters on the pending-list lock (when the pending list is
// non-empty) and release the lock acquired by acquire_pending_list_lock.
// May be entered with a pending exception; any exception raised here is
// cleared before returning.
void instanceRefKlass::release_and_notify_pending_list_lock(
  BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  //
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Lock should be held");
  // Notify waiters on pending lists lock if there is any reference.
  if (java_lang_ref_Reference::pending_list() != NULL) {
    ObjectSynchronizer::notifyall(h_lock, THREAD);
  }
  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}