Thu, 07 Apr 2011 09:53:20 -0700
7009266: G1: assert(obj->is_oop_or_null(true )) failed: Error
Summary: A referent object that is only weakly reachable at the start of concurrent marking but is re-attached to the strongly reachable object graph during marking may not be marked as live. This can cause the reference object to be processed prematurely and leave dangling pointers to the referent object. Implement a read barrier for the java.lang.ref.Reference::referent field by intrinsifying the Reference.get() method, and intercepting accesses through JNI, reflection, and Unsafe, so that when a non-null referent object is read it is also logged in an SATB buffer.
Reviewed-by: kvn, iveresov, never, tonyp, dholmes
1 /*
2 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "gc_implementation/shared/markSweep.inline.hpp"
29 #include "gc_interface/collectedHeap.hpp"
30 #include "gc_interface/collectedHeap.inline.hpp"
31 #include "memory/genCollectedHeap.hpp"
32 #include "memory/genOopClosures.inline.hpp"
33 #include "oops/instanceRefKlass.hpp"
34 #include "oops/oop.inline.hpp"
35 #include "utilities/preserveException.hpp"
36 #ifndef SERIALGC
37 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
38 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
39 #include "gc_implementation/g1/g1RemSet.inline.hpp"
40 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
41 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
42 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
43 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
44 #include "oops/oop.pcgc.inline.hpp"
45 #endif
// Serial mark-sweep: follow the fields of a java.lang.ref.Reference
// instance.  The referent field is excluded from the instanceKlass oop
// maps, so it is handled explicitly here: if the referent is not yet
// marked and the reference processor agrees to discover this Reference,
// the referent is NOT pushed now (it will be handled during reference
// processing) and only the ordinary instance fields are followed.
// Otherwise the referent is marked and pushed like a normal field.
// The next field (a link in the pending list) is always treated as a
// normal oop.
template <class T>
static void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    // Only an unmarked referent is a candidate for discovery; a marked one
    // is strongly reachable anyway and is treated as an ordinary field.
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      MarkSweep::mark_and_push(referent_addr);
    }
  }
  // treat next as normal oop. next is a link in the pending list.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  MarkSweep::mark_and_push(next_addr);
  ref->instanceKlass::oop_follow_contents(obj);
}
90 void instanceRefKlass::oop_follow_contents(oop obj) {
91 if (UseCompressedOops) {
92 specialized_oop_follow_contents<narrowOop>(this, obj);
93 } else {
94 specialized_oop_follow_contents<oop>(this, obj);
95 }
96 }
98 #ifndef SERIALGC
// Parallel compaction: follow the fields of a java.lang.ref.Reference
// instance.  Mirrors the serial version above, but uses the parallel
// compact mark bitmap to test liveness and pushes through the per-thread
// ParCompactionManager.  An unmarked referent that the reference
// processor discovers is skipped here and revisited during reference
// processing; otherwise the referent is treated as a normal field.
// The next field is always treated as a normal oop.
template <class T>
void specialized_oop_follow_contents(instanceRefKlass* ref,
                                     ParCompactionManager* cm,
                                     oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    // Discovery is attempted only while the referent is still unmarked.
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->instanceKlass::oop_follow_contents(cm, obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
        }
      )
      PSParallelCompact::mark_and_push(cm, referent_addr);
    }
  }
  // treat next as normal oop. next is a link in the pending list.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  PSParallelCompact::mark_and_push(cm, next_addr);
  ref->instanceKlass::oop_follow_contents(cm, obj);
}
144 void instanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
145 oop obj) {
146 if (UseCompressedOops) {
147 specialized_oop_follow_contents<narrowOop>(this, cm, obj);
148 } else {
149 specialized_oop_follow_contents<oop>(this, cm, obj);
150 }
151 }
152 #endif // SERIALGC
154 #ifdef ASSERT
// Debug-only helper: prints the address of, and the oop currently stored
// in, each of the three special Reference fields (referent, next,
// discovered).  Output is gated on TraceReferenceGC && PrintGCDetails.
// A NULL field address prints NULL for the stored value.
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if(TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
    gclog_or_tty->print_cr(" referent_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, referent_addr,
         referent_addr ?
         (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
    gclog_or_tty->print_cr(" next_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, next_addr,
         next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
    gclog_or_tty->print_cr(" discovered_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, discovered_addr,
         discovered_addr ?
         (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
  }
}
174 #endif
176 template <class T> void specialized_oop_adjust_pointers(instanceRefKlass *ref, oop obj) {
177 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
178 MarkSweep::adjust_pointer(referent_addr);
179 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
180 MarkSweep::adjust_pointer(next_addr);
181 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
182 MarkSweep::adjust_pointer(discovered_addr);
183 debug_only(trace_reference_gc("instanceRefKlass::oop_adjust_pointers", obj,
184 referent_addr, next_addr, discovered_addr);)
185 }
187 int instanceRefKlass::oop_adjust_pointers(oop obj) {
188 int size = size_helper();
189 instanceKlass::oop_adjust_pointers(obj);
191 if (UseCompressedOops) {
192 specialized_oop_adjust_pointers<narrowOop>(this, obj);
193 } else {
194 specialized_oop_adjust_pointers<oop>(this, obj);
195 }
196 return size;
197 }
// Shared body for the oop_oop_iterate variants below.  The discovered
// field is visited only when the closure asks for weak-ref fields.  The
// referent is either handed off to the reference processor (when unmarked,
// a processor is present, and discovery succeeds — in which case we return
// immediately) or visited as a normal oop.  The next field is always
// visited as a normal oop.  "contains" filters field addresses for the
// bounded (_m) variants; the unbounded variants pass a predicate that is
// always true.  NOTE: this macro returns from the enclosing function.
#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)        \
  if (closure->apply_to_weak_ref_discovered_field()) {                          \
    T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);           \
    closure->do_oop##nv_suffix(disc_addr);                                      \
  }                                                                             \
                                                                                \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);           \
  T heap_oop = oopDesc::load_heap_oop(referent_addr);                           \
  if (!oopDesc::is_null(heap_oop) && contains(referent_addr)) {                 \
    ReferenceProcessor* rp = closure->_ref_processor;                           \
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);                 \
    if (!referent->is_gc_marked() && (rp != NULL) &&                            \
        rp->discover_reference(obj, reference_type())) {                        \
      return size;                                                              \
    } else {                                                                    \
      /* treat referent as normal oop */                                        \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr);                                \
    }                                                                           \
  }                                                                             \
  /* treat next as normal oop */                                                \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                   \
  if (contains(next_addr)) {                                                    \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
    closure->do_oop##nv_suffix(next_addr);                                      \
  }                                                                             \
  return size;
// Always-true "contains" predicate used by the unbounded iterate
// variants: every field address is considered to be in range.
template <class T> bool contains(T* addr) {
  return true;
}
230 // Macro to define instanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
231 // all closures. Macros calling macros above for each oop size.
// Defines instanceRefKlass::oop_oop_iterate[_nv] for one closure type:
// iterate the ordinary fields via instanceKlass, then handle the special
// Reference fields (no memory-region bound, so the always-true "contains"
// predicate is used).
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
249 #ifndef SERIALGC
// Defines instanceRefKlass::oop_oop_iterate_backwards[_nv] for one closure
// type (non-serial collectors only).  Same structure as the forward
// variant: ordinary fields via instanceKlass, then the special Reference
// fields with the always-true "contains" predicate.
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {        \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
265 #endif // !SERIALGC
// Defines the bounded instanceRefKlass::oop_oop_iterate[_nv]_m variant for
// one closure type: only field addresses inside the given MemRegion are
// visited, so mr.contains is passed as the filtering predicate.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
                               OopClosureType* closure,                         \
                               MemRegion mr) {                                  \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains);\
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
  }                                                                             \
}
// Instantiate the iterate methods for every closure type; the backwards
// variants are generated only for the non-serial collectors.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
#ifndef SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
293 #ifndef SERIALGC
// Parallel scavenge: push the fields of a java.lang.ref.Reference
// instance.  If the referent needs scavenging and the reference processor
// discovers this Reference, the referent (and next) are left for
// reference processing and only the ordinary fields are pushed.
// Otherwise the referent — and, independently, the next field — are
// claimed like ordinary fields.  Both fields are excluded from the
// instanceKlass oop maps.
template <class T>
void specialized_oop_push_contents(instanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->instanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // treat next as normal oop
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_depth(next_addr);
  }
  ref->instanceKlass::oop_push_contents(pm, obj);
}
317 void instanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
318 if (UseCompressedOops) {
319 specialized_oop_push_contents<narrowOop>(this, pm, obj);
320 } else {
321 specialized_oop_push_contents<oop>(this, pm, obj);
322 }
323 }
325 template <class T>
326 void specialized_oop_update_pointers(instanceRefKlass *ref,
327 ParCompactionManager* cm, oop obj) {
328 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
329 PSParallelCompact::adjust_pointer(referent_addr);
330 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
331 PSParallelCompact::adjust_pointer(next_addr);
332 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
333 PSParallelCompact::adjust_pointer(discovered_addr);
334 debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
335 referent_addr, next_addr, discovered_addr);)
336 }
338 int instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
339 instanceKlass::oop_update_pointers(cm, obj);
340 if (UseCompressedOops) {
341 specialized_oop_update_pointers<narrowOop>(this, cm, obj);
342 } else {
343 specialized_oop_update_pointers<oop>(this, cm, obj);
344 }
345 return size_helper();
346 }
347 #endif // SERIALGC
// Called exactly once during bootstrap for java.lang.ref.Reference.
// Rewrites the class's single nonstatic oop map so that the referent,
// next and discovered fields are skipped by generic oop-map-driven
// iteration; the collector visits those fields via the specialized code
// in this file.  With shared spaces the map was already rewritten when
// the archive was dumped, so it is only sanity-checked here.
void instanceRefKlass::update_nonstatic_oop_maps(klassOop k) {
  // Clear the nonstatic oop-map entries corresponding to referent
  // and nextPending field. They are treated specially by the
  // garbage collector.
  // The discovered field is used only by the garbage collector
  // and is also treated specially.
  instanceKlass* ik = instanceKlass::cast(k);

  // Check that we have the right class
  debug_only(static bool first_time = true);
  assert(k == SystemDictionary::Reference_klass() && first_time,
         "Invalid update of maps");
  debug_only(first_time = false);
  assert(ik->nonstatic_oop_map_count() == 1, "just checking");

  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();

  // Check that the current map is (2,4) - currently points at field with
  // offset 2 (words) and has 4 map entries.
  debug_only(int offset = java_lang_ref_Reference::referent_offset);
  debug_only(unsigned int count = ((java_lang_ref_Reference::discovered_offset -
    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);

  if (UseSharedSpaces) {
    // Shared archive: map already updated at dump time; verify only.
    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
           map->count() == 1, "just checking");
  } else {
    assert(map->offset() == offset && map->count() == count,
           "just checking");

    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
    map->set_offset(java_lang_ref_Reference::queue_offset);
    map->set_count(1);
  }
}
386 // Verification
// Verification: in addition to the ordinary instance fields, check the
// referent and next fields, which are excluded from the oop maps and so
// skipped by the generic verify code.  For old-generation objects in a
// generational heap, an explicit remembered-set check is performed on
// each of these fields.
void instanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
  instanceKlass::oop_verify_on(obj, st);
  // Verify referent field
  oop referent = java_lang_ref_Reference::referent(obj);

  // We should make this general to all heaps
  GenCollectedHeap* gch = NULL;
  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
    gch = GenCollectedHeap::heap();

  if (referent != NULL) {
    guarantee(referent->is_oop(), "referent field heap failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the referent
      // field is not part of the oop mask and therefore skipped by the
      // regular verify code.
      if (UseCompressedOops) {
        narrowOop* referent_addr = (narrowOop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      } else {
        oop* referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
        obj->verify_old_oop(referent_addr, true);
      }
    }
  }
  // Verify next field
  oop next = java_lang_ref_Reference::next(obj);
  if (next != NULL) {
    guarantee(next->is_oop(), "next field verify failed");
    // next links another Reference on the pending list, so it must itself
    // be a Reference instance.
    guarantee(next->is_instanceRef(), "next field verify failed");
    if (gch != NULL && !gch->is_in_youngest(obj)) {
      // We do a specific remembered set check here since the next field is
      // not part of the oop mask and therefore skipped by the regular
      // verify code.
      if (UseCompressedOops) {
        narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      } else {
        oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
        obj->verify_old_oop(next_addr, true);
      }
    }
  }
}
433 bool instanceRefKlass::owns_pending_list_lock(JavaThread* thread) {
434 if (java_lang_ref_Reference::pending_list_lock() == NULL) return false;
435 Handle h_lock(thread, java_lang_ref_Reference::pending_list_lock());
436 return ObjectSynchronizer::current_thread_holds_lock(thread, h_lock);
437 }
// Acquire the monitor of the pending-list lock object on behalf of the
// caller.  Any exception pending on entry is preserved across the monitor
// enter, and any exception raised by the locking itself is discarded.
void instanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Locking should have succeeded");
  // Discard anything raised by the monitor enter; the caller's original
  // pending exception (if any) is restored by PRESERVE_EXCEPTION_MARK.
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}
// Notify waiters on the pending-list lock (when the pending list is
// non-empty) and release its monitor.  The caller must hold the lock.
// Any exception pending on entry is preserved across the monitor
// operations, and any exception they raise is discarded.
void instanceRefKlass::release_and_notify_pending_list_lock(
  BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
  //
  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Lock should be held");
  // Notify waiters on pending lists lock if there is any reference.
  if (java_lang_ref_Reference::pending_list() != NULL) {
    ObjectSynchronizer::notifyall(h_lock, THREAD);
  }
  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
  // Discard anything raised by notify/exit; the caller's original pending
  // exception (if any) is restored by PRESERVE_EXCEPTION_MARK.
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}