Wed, 07 Sep 2011 13:55:42 -0700
4965777: GC changes to support use of discovered field for pending references
Summary: If and when the reference handler thread is able to use the discovered field to link reference objects in its pending list, so will GC. In that case, GC will scan through this field once a reference object has been placed on the pending list, but not scan that field before that stage, as the field is used by the concurrent GC thread to link discovered objects. When the ReferenceHandler thread does not use the discovered field for the purpose of linking the elements in the pending list, as would be the case in older JDKs, the JVM will fall back to the old behaviour of using the next field for that purpose.
Reviewed-by: jcoomes, mchung, stefank
1 /*
2 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "gc_implementation/shared/markSweep.inline.hpp"
29 #include "gc_interface/collectedHeap.hpp"
30 #include "gc_interface/collectedHeap.inline.hpp"
31 #include "memory/genCollectedHeap.hpp"
32 #include "memory/genOopClosures.inline.hpp"
33 #include "oops/instanceRefKlass.hpp"
34 #include "oops/oop.inline.hpp"
35 #include "utilities/preserveException.hpp"
36 #ifndef SERIALGC
37 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
38 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
39 #include "gc_implementation/g1/g1RemSet.inline.hpp"
40 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
41 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
42 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
43 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
44 #include "oops/oop.pcgc.inline.hpp"
45 #endif
// MarkSweep-specialized tracing of a java.lang.ref.Reference instance.
// The referent is treated specially: if it is not yet marked and the
// reference processor discovers this Reference, the referent is NOT pushed
// here (it will be traversed later during reference processing).  Otherwise
// the referent is marked like a normal oop.  The discovered field is scanned
// as a normal oop only when the new pending-list scheme is in use AND the
// Reference is inactive (next != NULL); while discovery is in progress the
// discovered field is owned by the reference processor and must be skipped.
template <class T>
static void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->discover_reference(obj, ref->reference_type())) {
      // reference was discovered, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      MarkSweep::mark_and_push(referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    // Treat discovered as normal oop, if ref is not "active",
    // i.e. if next is non-NULL.
    T next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      MarkSweep::mark_and_push(discovered_addr);
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    oop next = oopDesc::load_decode_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   obj));
#endif
  }
  // treat next as normal oop.  next is a link in the reference queue.
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  MarkSweep::mark_and_push(next_addr);
  // Finally follow the instance's ordinary (non-special) oop fields.
  ref->instanceKlass::oop_follow_contents(obj);
}
115 void instanceRefKlass::oop_follow_contents(oop obj) {
116 if (UseCompressedOops) {
117 specialized_oop_follow_contents<narrowOop>(this, obj);
118 } else {
119 specialized_oop_follow_contents<oop>(this, obj);
120 }
121 }
#ifndef SERIALGC
// ParallelCompact-specialized tracing of a java.lang.ref.Reference instance.
// Mirrors the MarkSweep version above: the referent is skipped here when the
// reference processor discovers this Reference; the discovered field is
// scanned as a normal oop only when the pending list uses it AND the
// Reference is already inactive (next != NULL).
template <class T>
void specialized_oop_follow_contents(instanceRefKlass* ref,
                                     ParCompactionManager* cm,
                                     oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(cm, obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      PSParallelCompact::mark_and_push(cm, referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    // Treat discovered as normal oop, if ref is not "active",
    // i.e. if next is non-NULL.
    T next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      PSParallelCompact::mark_and_push(cm, discovered_addr);
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    T next = oopDesc::load_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   obj));
#endif
  }
  // Treat next as a normal oop (it is a link in the reference queue),
  // then follow the instance's ordinary oop fields.
  PSParallelCompact::mark_and_push(cm, next_addr);
  ref->instanceKlass::oop_follow_contents(cm, obj);
}
189 void instanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
190 oop obj) {
191 if (UseCompressedOops) {
192 specialized_oop_follow_contents<narrowOop>(this, cm, obj);
193 } else {
194 specialized_oop_follow_contents<oop>(this, cm, obj);
195 }
196 }
197 #endif // SERIALGC
#ifdef ASSERT
// Debug-only helper: when reference tracing is enabled, prints the address
// and current value of the referent, next and discovered fields of 'obj'.
// Each address may be NULL, in which case NULL is printed as the value too.
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if(TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
    gclog_or_tty->print_cr(" referent_addr/* " INTPTR_FORMAT " / "
                           INTPTR_FORMAT, referent_addr,
                           referent_addr ?
                           (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
    gclog_or_tty->print_cr(" next_addr/* " INTPTR_FORMAT " / "
                           INTPTR_FORMAT, next_addr,
                           next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
    gclog_or_tty->print_cr(" discovered_addr/* " INTPTR_FORMAT " / "
                           INTPTR_FORMAT, discovered_addr,
                           discovered_addr ?
                           (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
  }
}
#endif
221 template <class T> void specialized_oop_adjust_pointers(instanceRefKlass *ref, oop obj) {
222 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
223 MarkSweep::adjust_pointer(referent_addr);
224 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
225 MarkSweep::adjust_pointer(next_addr);
226 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
227 MarkSweep::adjust_pointer(discovered_addr);
228 debug_only(trace_reference_gc("instanceRefKlass::oop_adjust_pointers", obj,
229 referent_addr, next_addr, discovered_addr);)
230 }
232 int instanceRefKlass::oop_adjust_pointers(oop obj) {
233 int size = size_helper();
234 instanceKlass::oop_adjust_pointers(obj);
236 if (UseCompressedOops) {
237 specialized_oop_adjust_pointers<narrowOop>(this, obj);
238 } else {
239 specialized_oop_adjust_pointers<oop>(this, obj);
240 }
241 return size;
242 }
// Common body of the oop_oop_iterate*() variants for instanceRefKlass.
// Applies 'closure' to the discovered, referent and next fields, subject to:
//  - discovered is scanned up-front only if the closure asks for it;
//  - referent is skipped when the reference processor discovers this
//    Reference (it is handled later during reference processing);
//  - under the new pending-list scheme, discovered is also scanned as a
//    normal oop once the Reference is inactive (next != NULL).
// 'contains' filters addresses (the MemRegion-bounded variants pass
// mr.contains).  NOTE: expands inside a function that defines 'obj',
// 'closure' and 'size', and ends with 'return size'.
// Fix: the assert message previously concatenated "non-NULL" and
// "discovered" without a separating space.
#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)        \
  T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);             \
  if (closure->apply_to_weak_ref_discovered_field()) {                          \
    closure->do_oop##nv_suffix(disc_addr);                                      \
  }                                                                             \
                                                                                \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);           \
  T heap_oop = oopDesc::load_heap_oop(referent_addr);                           \
  ReferenceProcessor* rp = closure->_ref_processor;                             \
  if (!oopDesc::is_null(heap_oop)) {                                            \
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);                 \
    if (!referent->is_gc_marked() && (rp != NULL) &&                            \
        rp->discover_reference(obj, reference_type())) {                        \
      return size;                                                              \
    } else if (contains(referent_addr)) {                                       \
      /* treat referent as normal oop */                                        \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr);                                \
    }                                                                           \
  }                                                                             \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                   \
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {               \
    T next_oop = oopDesc::load_heap_oop(next_addr);                             \
    /* Treat discovered as normal oop, if ref is not "active" (next non-NULL) */\
    if (!oopDesc::is_null(next_oop) && contains(disc_addr)) {                   \
      /* i.e. ref is not "active" */                                            \
      debug_only(                                                               \
        if(TraceReferenceGC && PrintGCDetails) {                                \
          gclog_or_tty->print_cr(" Process discovered as normal "               \
                                 INTPTR_FORMAT, disc_addr);                     \
        }                                                                       \
      )                                                                         \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(disc_addr);                                    \
    }                                                                           \
  } else {                                                                      \
    /* In the case of older JDKs which do not use the discovered field for */   \
    /* the pending list, an inactive ref (next != NULL) must always have a */   \
    /* NULL discovered field. */                                                \
    debug_only(                                                                 \
      T next_oop = oopDesc::load_heap_oop(next_addr);                           \
      T disc_oop = oopDesc::load_heap_oop(disc_addr);                           \
      assert(oopDesc::is_null(next_oop) || oopDesc::is_null(disc_oop),          \
             err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL " \
                     "discovered field", obj));                                 \
    )                                                                           \
  }                                                                             \
  /* treat next as normal oop */                                                \
  if (contains(next_addr)) {                                                    \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
    closure->do_oop##nv_suffix(next_addr);                                      \
  }                                                                             \
  return size;
// Degenerate address filter used by the unbounded iterate variants above:
// every location is considered "contained".
template <class T> bool contains(T* /* addr, unused */) {
  return true;
}
// Macros defining instanceRefKlass::oop_oop_iterate, in virtual and
// non-virtual flavors, for all closure types.  Each expands the
// size-specialized iterate macro above for both oop widths.
// Defines instanceRefKlass::oop_oop_iterate[_nv] for one closure type.
// The unbounded variant uses the always-true 'contains' filter above.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
#ifndef SERIALGC
// Backwards-iteration variant of the definition macro above; used by
// collectors that scan objects in reverse address order.
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {        \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
#endif // !SERIALGC
// MemRegion-bounded variant: only field addresses inside 'mr' are passed to
// the closure (mr.contains is the address filter).
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
                               OopClosureType* closure,                         \
                               MemRegion mr) {                                  \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
  }                                                                             \
}
// Instantiate the iterate methods for every closure type.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
#ifndef SERIALGC
// Backwards variants are only needed by the non-serial collectors.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // SERIALGC
// MemRegion-bounded variants.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
#ifndef SERIALGC
// ParallelScavenge-specialized scanning of a java.lang.ref.Reference during
// promotion.  The referent is claimed only when it is scavengeable and the
// reference processor does not discover this Reference; the discovered field
// is claimed as a normal oop only under the new pending-list scheme and only
// once the Reference is inactive (next != NULL).
template <class T>
void specialized_oop_push_contents(instanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->instanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // Treat discovered as normal oop, if ref is not "active",
  // i.e. if next is non-NULL.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    T next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      if (PSScavenge::should_scavenge(discovered_addr)) {
        pm->claim_or_forward_depth(discovered_addr);
      }
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    oop next = oopDesc::load_decode_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   obj));
#endif
  }

  // Treat next as normal oop; next is a link in the reference queue.
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_depth(next_addr);
  }
  ref->instanceKlass::oop_push_contents(pm, obj);
}
417 void instanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
418 if (UseCompressedOops) {
419 specialized_oop_push_contents<narrowOop>(this, pm, obj);
420 } else {
421 specialized_oop_push_contents<oop>(this, pm, obj);
422 }
423 }
425 template <class T>
426 void specialized_oop_update_pointers(instanceRefKlass *ref,
427 ParCompactionManager* cm, oop obj) {
428 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
429 PSParallelCompact::adjust_pointer(referent_addr);
430 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
431 PSParallelCompact::adjust_pointer(next_addr);
432 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
433 PSParallelCompact::adjust_pointer(discovered_addr);
434 debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
435 referent_addr, next_addr, discovered_addr);)
436 }
438 int instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
439 instanceKlass::oop_update_pointers(cm, obj);
440 if (UseCompressedOops) {
441 specialized_oop_update_pointers<narrowOop>(this, cm, obj);
442 } else {
443 specialized_oop_update_pointers<oop>(this, cm, obj);
444 }
445 return size_helper();
446 }
447 #endif // SERIALGC
// Rewrite java.lang.ref.Reference's nonstatic oop map so that the GC-special
// fields (referent, next/pending, discovered) are excluded from generic oop
// iteration; the collectors above handle them explicitly.  Called exactly
// once, for the Reference class itself (enforced by the asserts below).
void instanceRefKlass::update_nonstatic_oop_maps(klassOop k) {
  // Clear the nonstatic oop-map entries corresponding to referent
  // and nextPending field.  They are treated specially by the
  // garbage collector.
  // The discovered field is used only by the garbage collector
  // and is also treated specially.
  instanceKlass* ik = instanceKlass::cast(k);

  // Check that we have the right class
  debug_only(static bool first_time = true);
  assert(k == SystemDictionary::Reference_klass() && first_time,
         "Invalid update of maps");
  debug_only(first_time = false);
  assert(ik->nonstatic_oop_map_count() == 1, "just checking");

  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();

  // Check that the current map is (2,4) - currently points at field with
  // offset 2 (words) and has 4 map entries.
  debug_only(int offset = java_lang_ref_Reference::referent_offset);
  debug_only(unsigned int count = ((java_lang_ref_Reference::discovered_offset -
    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);

  if (UseSharedSpaces) {
    // Shared (CDS) archive: the map was already rewritten when dumped.
    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
           map->count() == 1, "just checking");
  } else {
    assert(map->offset() == offset && map->count() == count,
           "just checking");

    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
    map->set_offset(java_lang_ref_Reference::queue_offset);
    map->set_count(1);
  }
}
486 // Verification
// Verify a Reference instance.  The referent and next fields are not covered
// by the oop-map based verification in instanceKlass::oop_verify_on (they
// were removed from the maps in update_nonstatic_oop_maps), so they are
// checked explicitly here, including a remembered-set check for old-gen
// objects under a generational heap.
void instanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
  instanceKlass::oop_verify_on(obj, st);
  // Verify referent field
  oop referent = java_lang_ref_Reference::referent(obj);

  // We should make this general to all heaps
  GenCollectedHeap* gch = NULL;
  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
    gch = GenCollectedHeap::heap();

  if (referent != NULL) {
    guarantee(referent->is_oop(), "referent field heap failed");
    if (gch != NULL && !gch->is_in_young(obj)) {
      // We do a specific remembered set check here since the referent
      // field is not part of the oop mask and therefore skipped by the
      // regular verify code.
      if (UseCompressedOops) {
        narrowOop* referent_addr = (narrowOop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      } else {
        oop* referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      }
    }
  }
  // Verify next field
  oop next = java_lang_ref_Reference::next(obj);
  if (next != NULL) {
    guarantee(next->is_oop(), "next field verify failed");
    // A non-NULL next must itself be a Reference (queue link).
    guarantee(next->is_instanceRef(), "next field verify failed");
    if (gch != NULL && !gch->is_in_young(obj)) {
      // We do a specific remembered set check here since the next field is
      // not part of the oop mask and therefore skipped by the regular
      // verify code.
      if (UseCompressedOops) {
        narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      } else {
        oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      }
    }
  }
}
533 bool instanceRefKlass::owns_pending_list_lock(JavaThread* thread) {
534 if (java_lang_ref_Reference::pending_list_lock() == NULL) return false;
535 Handle h_lock(thread, java_lang_ref_Reference::pending_list_lock());
536 return ObjectSynchronizer::current_thread_holds_lock(thread, h_lock);
537 }
// Acquire the java.lang.ref.Reference pending-list lock for the current
// thread.  May be entered with an exception pending: PRESERVE_EXCEPTION_MARK
// saves/restores it around the ObjectSynchronizer calls, which take a TRAPS
// argument but never actually throw here.
void instanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Locking should have succeeded");
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}
// Notify waiters on the pending-list lock (if the pending list is non-empty,
// so the ReferenceHandler thread wakes up) and then release the lock.  The
// caller must own the lock.  As in acquire_pending_list_lock, a pending
// exception is preserved across the synchronizer calls.
void instanceRefKlass::release_and_notify_pending_list_lock(
  BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Lock should be held");
  // Notify waiters on pending lists lock if there is any reference.
  if (java_lang_ref_Reference::pending_list() != NULL) {
    ObjectSynchronizer::notifyall(h_lock, THREAD);
  }
  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}