src/share/vm/oops/instanceRefKlass.cpp

Fri, 10 Dec 2010 12:13:03 -0500

author
coleenp
date
Fri, 10 Dec 2010 12:13:03 -0500
changeset 2363
7cf1a74771e8
parent 2314
f95d63e2154a
child 2367
b03e6b4c7c75
permissions
-rw-r--r--

6988439: Parallel Class Loading test deadlock involving MethodData_lock and Pending List Lock
Summary: Don't acquire methodData_lock while holding pending list lock
Reviewed-by: kvn, never, ysr

duke@435 1 /*
stefank@2314 2 * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
stefank@2314 26 #include "classfile/javaClasses.hpp"
stefank@2314 27 #include "classfile/systemDictionary.hpp"
stefank@2314 28 #include "gc_implementation/shared/markSweep.inline.hpp"
stefank@2314 29 #include "gc_interface/collectedHeap.hpp"
stefank@2314 30 #include "gc_interface/collectedHeap.inline.hpp"
stefank@2314 31 #include "memory/genCollectedHeap.hpp"
stefank@2314 32 #include "memory/genOopClosures.inline.hpp"
stefank@2314 33 #include "oops/instanceRefKlass.hpp"
stefank@2314 34 #include "oops/oop.inline.hpp"
stefank@2314 35 #include "utilities/preserveException.hpp"
stefank@2314 36 #ifndef SERIALGC
stefank@2314 37 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
stefank@2314 38 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
stefank@2314 39 #include "gc_implementation/g1/g1RemSet.inline.hpp"
stefank@2314 40 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
stefank@2314 41 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
stefank@2314 42 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
stefank@2314 43 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
stefank@2314 44 #include "oops/oop.pcgc.inline.hpp"
stefank@2314 45 #endif
duke@435 46
coleenp@548 47 template <class T>
coleenp@548 48 static void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
coleenp@548 49 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
ysr@1280 50 T heap_oop = oopDesc::load_heap_oop(referent_addr);
duke@435 51 debug_only(
duke@435 52 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 53 gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
duke@435 54 }
duke@435 55 )
ysr@1280 56 if (!oopDesc::is_null(heap_oop)) {
ysr@1280 57 oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
duke@435 58 if (!referent->is_gc_marked() &&
duke@435 59 MarkSweep::ref_processor()->
coleenp@548 60 discover_reference(obj, ref->reference_type())) {
duke@435 61 // reference already enqueued, referent will be traversed later
coleenp@548 62 ref->instanceKlass::oop_follow_contents(obj);
duke@435 63 debug_only(
duke@435 64 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 65 gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
duke@435 66 }
duke@435 67 )
duke@435 68 return;
duke@435 69 } else {
duke@435 70 // treat referent as normal oop
duke@435 71 debug_only(
duke@435 72 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 73 gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
duke@435 74 }
duke@435 75 )
duke@435 76 MarkSweep::mark_and_push(referent_addr);
duke@435 77 }
duke@435 78 }
duke@435 79 // treat next as normal oop. next is a link in the pending list.
coleenp@548 80 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
duke@435 81 debug_only(
duke@435 82 if(TraceReferenceGC && PrintGCDetails) {
duke@435 83 gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr);
duke@435 84 }
duke@435 85 )
duke@435 86 MarkSweep::mark_and_push(next_addr);
coleenp@548 87 ref->instanceKlass::oop_follow_contents(obj);
coleenp@548 88 }
coleenp@548 89
coleenp@548 90 void instanceRefKlass::oop_follow_contents(oop obj) {
coleenp@548 91 if (UseCompressedOops) {
coleenp@548 92 specialized_oop_follow_contents<narrowOop>(this, obj);
coleenp@548 93 } else {
coleenp@548 94 specialized_oop_follow_contents<oop>(this, obj);
coleenp@548 95 }
duke@435 96 }
duke@435 97
duke@435 98 #ifndef SERIALGC
coleenp@548 99 template <class T>
phh@1558 100 void specialized_oop_follow_contents(instanceRefKlass* ref,
phh@1558 101 ParCompactionManager* cm,
phh@1558 102 oop obj) {
coleenp@548 103 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
ysr@1280 104 T heap_oop = oopDesc::load_heap_oop(referent_addr);
duke@435 105 debug_only(
duke@435 106 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 107 gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
duke@435 108 }
duke@435 109 )
ysr@1280 110 if (!oopDesc::is_null(heap_oop)) {
ysr@1280 111 oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
duke@435 112 if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
duke@435 113 PSParallelCompact::ref_processor()->
coleenp@548 114 discover_reference(obj, ref->reference_type())) {
duke@435 115 // reference already enqueued, referent will be traversed later
coleenp@548 116 ref->instanceKlass::oop_follow_contents(cm, obj);
duke@435 117 debug_only(
duke@435 118 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 119 gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
duke@435 120 }
duke@435 121 )
duke@435 122 return;
duke@435 123 } else {
duke@435 124 // treat referent as normal oop
duke@435 125 debug_only(
duke@435 126 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 127 gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
duke@435 128 }
duke@435 129 )
duke@435 130 PSParallelCompact::mark_and_push(cm, referent_addr);
duke@435 131 }
duke@435 132 }
duke@435 133 // treat next as normal oop. next is a link in the pending list.
coleenp@548 134 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
duke@435 135 debug_only(
duke@435 136 if(TraceReferenceGC && PrintGCDetails) {
duke@435 137 gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr);
duke@435 138 }
duke@435 139 )
duke@435 140 PSParallelCompact::mark_and_push(cm, next_addr);
coleenp@548 141 ref->instanceKlass::oop_follow_contents(cm, obj);
coleenp@548 142 }
coleenp@548 143
coleenp@548 144 void instanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
coleenp@548 145 oop obj) {
coleenp@548 146 if (UseCompressedOops) {
coleenp@548 147 specialized_oop_follow_contents<narrowOop>(this, cm, obj);
coleenp@548 148 } else {
coleenp@548 149 specialized_oop_follow_contents<oop>(this, cm, obj);
coleenp@548 150 }
duke@435 151 }
duke@435 152 #endif // SERIALGC
duke@435 153
coleenp@548 154 #ifdef ASSERT
coleenp@548 155 template <class T> void trace_reference_gc(const char *s, oop obj,
coleenp@548 156 T* referent_addr,
coleenp@548 157 T* next_addr,
coleenp@548 158 T* discovered_addr) {
coleenp@548 159 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 160 gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
coleenp@548 161 gclog_or_tty->print_cr(" referent_addr/* " INTPTR_FORMAT " / "
coleenp@548 162 INTPTR_FORMAT, referent_addr,
coleenp@548 163 referent_addr ?
coleenp@548 164 (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
coleenp@548 165 gclog_or_tty->print_cr(" next_addr/* " INTPTR_FORMAT " / "
coleenp@548 166 INTPTR_FORMAT, next_addr,
coleenp@548 167 next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
coleenp@548 168 gclog_or_tty->print_cr(" discovered_addr/* " INTPTR_FORMAT " / "
coleenp@548 169 INTPTR_FORMAT, discovered_addr,
coleenp@548 170 discovered_addr ?
coleenp@548 171 (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
coleenp@548 172 }
coleenp@548 173 }
coleenp@548 174 #endif
coleenp@548 175
coleenp@548 176 template <class T> void specialized_oop_adjust_pointers(instanceRefKlass *ref, oop obj) {
coleenp@548 177 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 178 MarkSweep::adjust_pointer(referent_addr);
coleenp@548 179 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 180 MarkSweep::adjust_pointer(next_addr);
coleenp@548 181 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
coleenp@548 182 MarkSweep::adjust_pointer(discovered_addr);
coleenp@548 183 debug_only(trace_reference_gc("instanceRefKlass::oop_adjust_pointers", obj,
coleenp@548 184 referent_addr, next_addr, discovered_addr);)
coleenp@548 185 }
duke@435 186
duke@435 187 int instanceRefKlass::oop_adjust_pointers(oop obj) {
duke@435 188 int size = size_helper();
duke@435 189 instanceKlass::oop_adjust_pointers(obj);
duke@435 190
coleenp@548 191 if (UseCompressedOops) {
coleenp@548 192 specialized_oop_adjust_pointers<narrowOop>(this, obj);
coleenp@548 193 } else {
coleenp@548 194 specialized_oop_adjust_pointers<oop>(this, obj);
duke@435 195 }
duke@435 196 return size;
duke@435 197 }
duke@435 198
// Common body of the oop_oop_iterate variants for Reference objects.
// Expanded inside a function that declares 'obj', 'closure' and 'size'.
// Note: this macro can 'return size' early when the reference is discovered.
#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)       \
  if (closure->apply_to_weak_ref_discovered_field()) {                         \
    T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);          \
    closure->do_oop##nv_suffix(disc_addr);                                     \
  }                                                                            \
                                                                               \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);          \
  T heap_oop = oopDesc::load_heap_oop(referent_addr);                          \
  if (!oopDesc::is_null(heap_oop) && contains(referent_addr)) {                \
    ReferenceProcessor* rp = closure->_ref_processor;                          \
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);                \
    if (!referent->is_gc_marked() && (rp != NULL) &&                           \
        rp->discover_reference(obj, reference_type())) {                       \
      return size;                                                             \
    } else {                                                                   \
      /* treat referent as normal oop */                                       \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr);                               \
    }                                                                          \
  }                                                                            \
  /* treat next as normal oop */                                               \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                  \
  if (contains(next_addr)) {                                                   \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
    closure->do_oop##nv_suffix(next_addr);                                     \
  }                                                                            \
  return size;                                                                 \

coleenp@548 227
// Trivial containment predicate for the whole-heap iterate variants:
// every address is considered in range (the _m variants pass mr.contains).
template <class T> bool contains(T *t) { return true; }
coleenp@548 229
coleenp@548 230 // Macro to define instanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
coleenp@548 231 // all closures. Macros calling macros above for each oop size.
coleenp@548 232
// Defines instanceRefKlass::oop_oop_iterate[nv_suffix] for one closure type.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
duke@435 248
ysr@777 249 #ifndef SERIALGC
// Defines instanceRefKlass::oop_oop_iterate_backwards[nv_suffix] for one
// closure type (non-serial collectors only).
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {        \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
ysr@777 265 #endif // !SERIALGC
ysr@777 266
ysr@777 267
// Defines instanceRefKlass::oop_oop_iterate[nv_suffix]_m for one closure
// type: only fields whose addresses fall inside MemRegion mr are visited.
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                                \
int instanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
                               OopClosureType* closure,                         \
                               MemRegion mr) {                                  \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = instanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
  }                                                                             \
}
duke@435 283
duke@435 284 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ysr@777 285 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ysr@777 286 #ifndef SERIALGC
ysr@777 287 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ysr@777 288 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ysr@777 289 #endif // SERIALGC
duke@435 290 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ysr@777 291 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
duke@435 292
duke@435 293 #ifndef SERIALGC
coleenp@548 294 template <class T>
coleenp@548 295 void specialized_oop_push_contents(instanceRefKlass *ref,
coleenp@548 296 PSPromotionManager* pm, oop obj) {
coleenp@548 297 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 298 if (PSScavenge::should_scavenge(referent_addr)) {
duke@435 299 ReferenceProcessor* rp = PSScavenge::reference_processor();
coleenp@548 300 if (rp->discover_reference(obj, ref->reference_type())) {
duke@435 301 // reference already enqueued, referent and next will be traversed later
coleenp@548 302 ref->instanceKlass::oop_push_contents(pm, obj);
duke@435 303 return;
duke@435 304 } else {
duke@435 305 // treat referent as normal oop
duke@435 306 pm->claim_or_forward_depth(referent_addr);
duke@435 307 }
duke@435 308 }
duke@435 309 // treat next as normal oop
coleenp@548 310 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 311 if (PSScavenge::should_scavenge(next_addr)) {
duke@435 312 pm->claim_or_forward_depth(next_addr);
duke@435 313 }
coleenp@548 314 ref->instanceKlass::oop_push_contents(pm, obj);
coleenp@548 315 }
coleenp@548 316
coleenp@548 317 void instanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
coleenp@548 318 if (UseCompressedOops) {
coleenp@548 319 specialized_oop_push_contents<narrowOop>(this, pm, obj);
coleenp@548 320 } else {
coleenp@548 321 specialized_oop_push_contents<oop>(this, pm, obj);
coleenp@548 322 }
coleenp@548 323 }
coleenp@548 324
coleenp@548 325 template <class T>
coleenp@548 326 void specialized_oop_update_pointers(instanceRefKlass *ref,
coleenp@548 327 ParCompactionManager* cm, oop obj) {
coleenp@548 328 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 329 PSParallelCompact::adjust_pointer(referent_addr);
coleenp@548 330 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 331 PSParallelCompact::adjust_pointer(next_addr);
coleenp@548 332 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
coleenp@548 333 PSParallelCompact::adjust_pointer(discovered_addr);
coleenp@548 334 debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
coleenp@548 335 referent_addr, next_addr, discovered_addr);)
duke@435 336 }
duke@435 337
duke@435 338 int instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
duke@435 339 instanceKlass::oop_update_pointers(cm, obj);
coleenp@548 340 if (UseCompressedOops) {
coleenp@548 341 specialized_oop_update_pointers<narrowOop>(this, cm, obj);
coleenp@548 342 } else {
coleenp@548 343 specialized_oop_update_pointers<oop>(this, cm, obj);
coleenp@548 344 }
coleenp@548 345 return size_helper();
coleenp@548 346 }
duke@435 347
duke@435 348
coleenp@548 349 template <class T> void
coleenp@548 350 specialized_oop_update_pointers(ParCompactionManager* cm, oop obj,
coleenp@548 351 HeapWord* beg_addr, HeapWord* end_addr) {
coleenp@548 352 T* p;
coleenp@548 353 T* referent_addr = p = (T*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 354 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
coleenp@548 355 T* next_addr = p = (T*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 356 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
coleenp@548 357 T* discovered_addr = p = (T*)java_lang_ref_Reference::discovered_addr(obj);
coleenp@548 358 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
coleenp@548 359 debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
coleenp@548 360 referent_addr, next_addr, discovered_addr);)
duke@435 361 }
duke@435 362
duke@435 363 int
duke@435 364 instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj,
duke@435 365 HeapWord* beg_addr, HeapWord* end_addr) {
duke@435 366 instanceKlass::oop_update_pointers(cm, obj, beg_addr, end_addr);
coleenp@548 367 if (UseCompressedOops) {
coleenp@548 368 specialized_oop_update_pointers<narrowOop>(cm, obj, beg_addr, end_addr);
coleenp@548 369 } else {
coleenp@548 370 specialized_oop_update_pointers<oop>(cm, obj, beg_addr, end_addr);
duke@435 371 }
duke@435 372 return size_helper();
duke@435 373 }
duke@435 374 #endif // SERIALGC
duke@435 375
duke@435 376 void instanceRefKlass::update_nonstatic_oop_maps(klassOop k) {
duke@435 377 // Clear the nonstatic oop-map entries corresponding to referent
duke@435 378 // and nextPending field. They are treated specially by the
duke@435 379 // garbage collector.
duke@435 380 // The discovered field is used only by the garbage collector
duke@435 381 // and is also treated specially.
duke@435 382 instanceKlass* ik = instanceKlass::cast(k);
duke@435 383
duke@435 384 // Check that we have the right class
duke@435 385 debug_only(static bool first_time = true);
never@1577 386 assert(k == SystemDictionary::Reference_klass() && first_time,
duke@435 387 "Invalid update of maps");
duke@435 388 debug_only(first_time = false);
jcoomes@1373 389 assert(ik->nonstatic_oop_map_count() == 1, "just checking");
duke@435 390
duke@435 391 OopMapBlock* map = ik->start_of_nonstatic_oop_maps();
duke@435 392
duke@435 393 // Check that the current map is (2,4) - currently points at field with
duke@435 394 // offset 2 (words) and has 4 map entries.
duke@435 395 debug_only(int offset = java_lang_ref_Reference::referent_offset);
jcoomes@1374 396 debug_only(unsigned int count = ((java_lang_ref_Reference::discovered_offset -
coleenp@548 397 java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);
duke@435 398
duke@435 399 if (UseSharedSpaces) {
duke@435 400 assert(map->offset() == java_lang_ref_Reference::queue_offset &&
jcoomes@1373 401 map->count() == 1, "just checking");
duke@435 402 } else {
jcoomes@1373 403 assert(map->offset() == offset && map->count() == count,
duke@435 404 "just checking");
duke@435 405
duke@435 406 // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
duke@435 407 map->set_offset(java_lang_ref_Reference::queue_offset);
jcoomes@1373 408 map->set_count(1);
duke@435 409 }
duke@435 410 }
duke@435 411
duke@435 412
duke@435 413 // Verification
duke@435 414
duke@435 415 void instanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
duke@435 416 instanceKlass::oop_verify_on(obj, st);
duke@435 417 // Verify referent field
duke@435 418 oop referent = java_lang_ref_Reference::referent(obj);
duke@435 419
duke@435 420 // We should make this general to all heaps
duke@435 421 GenCollectedHeap* gch = NULL;
duke@435 422 if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
duke@435 423 gch = GenCollectedHeap::heap();
duke@435 424
duke@435 425 if (referent != NULL) {
duke@435 426 guarantee(referent->is_oop(), "referent field heap failed");
coleenp@548 427 if (gch != NULL && !gch->is_in_youngest(obj)) {
duke@435 428 // We do a specific remembered set check here since the referent
duke@435 429 // field is not part of the oop mask and therefore skipped by the
duke@435 430 // regular verify code.
coleenp@548 431 if (UseCompressedOops) {
coleenp@548 432 narrowOop* referent_addr = (narrowOop*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 433 obj->verify_old_oop(referent_addr, true);
coleenp@548 434 } else {
coleenp@548 435 oop* referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 436 obj->verify_old_oop(referent_addr, true);
coleenp@548 437 }
coleenp@548 438 }
duke@435 439 }
duke@435 440 // Verify next field
duke@435 441 oop next = java_lang_ref_Reference::next(obj);
duke@435 442 if (next != NULL) {
ysr@777 443 guarantee(next->is_oop(), "next field verify failed");
duke@435 444 guarantee(next->is_instanceRef(), "next field verify failed");
duke@435 445 if (gch != NULL && !gch->is_in_youngest(obj)) {
duke@435 446 // We do a specific remembered set check here since the next field is
duke@435 447 // not part of the oop mask and therefore skipped by the regular
duke@435 448 // verify code.
coleenp@548 449 if (UseCompressedOops) {
coleenp@548 450 narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 451 obj->verify_old_oop(next_addr, true);
coleenp@548 452 } else {
coleenp@548 453 oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 454 obj->verify_old_oop(next_addr, true);
coleenp@548 455 }
duke@435 456 }
duke@435 457 }
duke@435 458 }
duke@435 459
coleenp@2363 460 bool instanceRefKlass::owns_pending_list_lock(JavaThread* thread) {
coleenp@2363 461 Handle h_lock(thread, java_lang_ref_Reference::pending_list_lock());
coleenp@2363 462 return ObjectSynchronizer::current_thread_holds_lock(thread, h_lock);
coleenp@2363 463 }
coleenp@2363 464
duke@435 465 void instanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
duke@435 466 // we may enter this with pending exception set
duke@435 467 PRESERVE_EXCEPTION_MARK; // exceptions are never thrown, needed for TRAPS argument
duke@435 468 Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
duke@435 469 ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
duke@435 470 assert(ObjectSynchronizer::current_thread_holds_lock(
duke@435 471 JavaThread::current(), h_lock),
duke@435 472 "Locking should have succeeded");
duke@435 473 if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
duke@435 474 }
duke@435 475
duke@435 476 void instanceRefKlass::release_and_notify_pending_list_lock(
duke@435 477 BasicLock *pending_list_basic_lock) {
duke@435 478 // we may enter this with pending exception set
duke@435 479 PRESERVE_EXCEPTION_MARK; // exceptions are never thrown, needed for TRAPS argument
duke@435 480 //
duke@435 481 Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
duke@435 482 assert(ObjectSynchronizer::current_thread_holds_lock(
duke@435 483 JavaThread::current(), h_lock),
duke@435 484 "Lock should be held");
duke@435 485 // Notify waiters on pending lists lock if there is any reference.
duke@435 486 if (java_lang_ref_Reference::pending_list() != NULL) {
duke@435 487 ObjectSynchronizer::notifyall(h_lock, THREAD);
duke@435 488 }
duke@435 489 ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
duke@435 490 if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
duke@435 491 }

mercurial