src/share/vm/oops/instanceRefKlass.cpp

author:      ysr
date:        Thu, 05 Jun 2008 15:57:56 -0700
changeset:   777:37f87013dfd8
parent:      548:ba764ed4b6f2
child:       791:1ee8caae33af
permissions: -rw-r--r--
6711316: Open source the Garbage-First garbage collector
Summary: First mercurial integration of the code for the Garbage-First garbage collector.
Reviewed-by: apetrusenko, iveresov, jmasa, sgoldman, tonyp, ysr

duke@435 1 /*
duke@435 2 * Copyright 1997-2006 Sun Microsystems, Inc. All Rights Reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
duke@435 19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
duke@435 20 * CA 95054 USA or visit www.sun.com if you need additional information or
duke@435 21 * have any questions.
duke@435 22 *
duke@435 23 */
duke@435 24
duke@435 25 # include "incls/_precompiled.incl"
duke@435 26 # include "incls/_instanceRefKlass.cpp.incl"
duke@435 27
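// Serial mark-sweep handling of java.lang.ref.Reference instances: if the
// referent is still unmarked and the MarkSweep ReferenceProcessor discovers
// this Reference, the referent is left for later reference processing;
// otherwise it is marked and pushed like an ordinary field. The next field
// and the remaining instance fields are always treated normally. The template
// is instantiated for narrowOop or oop depending on UseCompressedOops (see
// the dispatch in instanceRefKlass::oop_follow_contents below).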
coleenp@548 28 template <class T>
coleenp@548 29 static void specialized_oop_follow_contents(instanceRefKlass* ref, oop obj) {
coleenp@548 30 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 31 oop referent = oopDesc::load_decode_heap_oop(referent_addr);
duke@435 32 debug_only(
duke@435 33 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 34 gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
duke@435 35 }
duke@435 36 )
duke@435 37 if (referent != NULL) {
duke@435 38 if (!referent->is_gc_marked() &&
duke@435 39 MarkSweep::ref_processor()->
coleenp@548 40 discover_reference(obj, ref->reference_type())) {
duke@435 41 // reference already enqueued, referent will be traversed later
coleenp@548 42 ref->instanceKlass::oop_follow_contents(obj);
duke@435 43 debug_only(
duke@435 44 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 45 gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
duke@435 46 }
duke@435 47 )
duke@435 48 return;
duke@435 49 } else {
duke@435 50 // treat referent as normal oop
duke@435 51 debug_only(
duke@435 52 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 53 gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
duke@435 54 }
duke@435 55 )
duke@435 56 MarkSweep::mark_and_push(referent_addr);
duke@435 57 }
duke@435 58 }
duke@435 59 // treat next as normal oop. next is a link in the pending list.
coleenp@548 60 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
duke@435 61 debug_only(
duke@435 62 if(TraceReferenceGC && PrintGCDetails) {
duke@435 63 gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr);
duke@435 64 }
duke@435 65 )
duke@435 66 MarkSweep::mark_and_push(next_addr);
coleenp@548 67 ref->instanceKlass::oop_follow_contents(obj);
coleenp@548 68 }
coleenp@548 69
coleenp@548 70 void instanceRefKlass::oop_follow_contents(oop obj) {
coleenp@548 71 if (UseCompressedOops) {
coleenp@548 72 specialized_oop_follow_contents<narrowOop>(this, obj);
coleenp@548 73 } else {
coleenp@548 74 specialized_oop_follow_contents<oop>(this, obj);
coleenp@548 75 }
duke@435 76 }
duke@435 77
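// The variant below mirrors the serial logic for the parallel compacting
// collector (PSParallelCompact): marked-ness is tested against the
// PSParallelCompact mark bitmap, and pushes go through the supplied
// ParCompactionManager.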
duke@435 78 #ifndef SERIALGC
coleenp@548 79 template <class T>
coleenp@548 80 static void specialized_oop_follow_contents(instanceRefKlass* ref,
coleenp@548 81 ParCompactionManager* cm,
coleenp@548 82 oop obj) {
coleenp@548 83 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 84 oop referent = oopDesc::load_decode_heap_oop(referent_addr);
duke@435 85 debug_only(
duke@435 86 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 87 gclog_or_tty->print_cr("instanceRefKlass::oop_follow_contents " INTPTR_FORMAT, obj);
duke@435 88 }
duke@435 89 )
duke@435 90 if (referent != NULL) {
duke@435 91 if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
duke@435 92 PSParallelCompact::ref_processor()->
coleenp@548 93 discover_reference(obj, ref->reference_type())) {
duke@435 94 // reference already enqueued, referent will be traversed later
coleenp@548 95 ref->instanceKlass::oop_follow_contents(cm, obj);
duke@435 96 debug_only(
duke@435 97 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 98 gclog_or_tty->print_cr(" Non NULL enqueued " INTPTR_FORMAT, obj);
duke@435 99 }
duke@435 100 )
duke@435 101 return;
duke@435 102 } else {
duke@435 103 // treat referent as normal oop
duke@435 104 debug_only(
duke@435 105 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 106 gclog_or_tty->print_cr(" Non NULL normal " INTPTR_FORMAT, obj);
duke@435 107 }
duke@435 108 )
duke@435 109 PSParallelCompact::mark_and_push(cm, referent_addr);
duke@435 110 }
duke@435 111 }
duke@435 112 // treat next as normal oop. next is a link in the pending list.
coleenp@548 113 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
duke@435 114 debug_only(
duke@435 115 if(TraceReferenceGC && PrintGCDetails) {
duke@435 116 gclog_or_tty->print_cr(" Process next as normal " INTPTR_FORMAT, next_addr);
duke@435 117 }
duke@435 118 )
duke@435 119 PSParallelCompact::mark_and_push(cm, next_addr);
coleenp@548 120 ref->instanceKlass::oop_follow_contents(cm, obj);
coleenp@548 121 }
coleenp@548 122
coleenp@548 123 void instanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
coleenp@548 124 oop obj) {
coleenp@548 125 if (UseCompressedOops) {
coleenp@548 126 specialized_oop_follow_contents<narrowOop>(this, cm, obj);
coleenp@548 127 } else {
coleenp@548 128 specialized_oop_follow_contents<oop>(this, cm, obj);
coleenp@548 129 }
duke@435 130 }
duke@435 131 #endif // SERIALGC
duke@435 132
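// Debug-only helper: prints the referent, next and discovered slots of a
// Reference together with the oops currently stored there. Used by the
// adjust/update paths below when TraceReferenceGC and PrintGCDetails are set.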
coleenp@548 133 #ifdef ASSERT
coleenp@548 134 template <class T> void trace_reference_gc(const char *s, oop obj,
coleenp@548 135 T* referent_addr,
coleenp@548 136 T* next_addr,
coleenp@548 137 T* discovered_addr) {
coleenp@548 138 if(TraceReferenceGC && PrintGCDetails) {
coleenp@548 139 gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
coleenp@548 140 gclog_or_tty->print_cr(" referent_addr/* " INTPTR_FORMAT " / "
coleenp@548 141 INTPTR_FORMAT, referent_addr,
coleenp@548 142 referent_addr ?
coleenp@548 143 (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
coleenp@548 144 gclog_or_tty->print_cr(" next_addr/* " INTPTR_FORMAT " / "
coleenp@548 145 INTPTR_FORMAT, next_addr,
coleenp@548 146 next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
coleenp@548 147 gclog_or_tty->print_cr(" discovered_addr/* " INTPTR_FORMAT " / "
coleenp@548 148 INTPTR_FORMAT, discovered_addr,
coleenp@548 149 discovered_addr ?
coleenp@548 150 (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
coleenp@548 151 }
coleenp@548 152 }
coleenp@548 153 #endif
coleenp@548 154
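// Pointer adjustment for mark-sweep compaction: the referent, next and
// discovered fields are excluded from the regular nonstatic oop map (see
// update_nonstatic_oop_maps below), so they are adjusted explicitly here in
// addition to the fields handled by instanceKlass::oop_adjust_pointers.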
coleenp@548 155 template <class T> void specialized_oop_adjust_pointers(instanceRefKlass *ref, oop obj) {
coleenp@548 156 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 157 MarkSweep::adjust_pointer(referent_addr);
coleenp@548 158 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 159 MarkSweep::adjust_pointer(next_addr);
coleenp@548 160 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
coleenp@548 161 MarkSweep::adjust_pointer(discovered_addr);
coleenp@548 162 debug_only(trace_reference_gc("instanceRefKlass::oop_adjust_pointers", obj,
coleenp@548 163 referent_addr, next_addr, discovered_addr);)
coleenp@548 164 }
duke@435 165
duke@435 166 int instanceRefKlass::oop_adjust_pointers(oop obj) {
duke@435 167 int size = size_helper();
duke@435 168 instanceKlass::oop_adjust_pointers(obj);
duke@435 169
coleenp@548 170 if (UseCompressedOops) {
coleenp@548 171 specialized_oop_adjust_pointers<narrowOop>(this, obj);
coleenp@548 172 } else {
coleenp@548 173 specialized_oop_adjust_pointers<oop>(this, obj);
duke@435 174 }
duke@435 175 return size;
duke@435 176 }
duke@435 177
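// Shared body of the oop_oop_iterate variants. The 'contains' argument is
// either the trivial contains() predicate defined after the macro (iterate
// over the whole object) or mr.contains for the MemRegion-bounded _m
// versions, so the same code serves the bounded, unbounded and _backwards
// iterators.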
coleenp@548 178 #define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains) \
ysr@777 179 if (closure->apply_to_weak_ref_discovered_field()) { \
ysr@777 180 T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj); \
ysr@777 181 closure->do_oop##nv_suffix(disc_addr); \
ysr@777 182 } \
ysr@777 183 \
coleenp@548 184 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj); \
coleenp@548 185 oop referent = oopDesc::load_decode_heap_oop(referent_addr); \
coleenp@548 186 if (referent != NULL && contains(referent_addr)) { \
coleenp@548 187 ReferenceProcessor* rp = closure->_ref_processor; \
coleenp@548 188 if (!referent->is_gc_marked() && (rp != NULL) && \
coleenp@548 189 rp->discover_reference(obj, reference_type())) { \
coleenp@548 190 return size; \
coleenp@548 191 } else { \
coleenp@548 192 /* treat referent as normal oop */ \
coleenp@548 193 SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
coleenp@548 194 closure->do_oop##nv_suffix(referent_addr); \
coleenp@548 195 } \
coleenp@548 196 } \
coleenp@548 197 /* treat next as normal oop */ \
coleenp@548 198 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj); \
coleenp@548 199 if (contains(next_addr)) { \
coleenp@548 200 SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
coleenp@548 201 closure->do_oop##nv_suffix(next_addr); \
coleenp@548 202 } \
coleenp@548 203 return size; \
coleenp@548 204
coleenp@548 205
coleenp@548 206 template <class T> bool contains(T *t) { return true; }
coleenp@548 207
coleenp@548 208 // Macros defining instanceRefKlass::oop_oop_iterate for all closures, in both
coleenp@548 209 // virtual and non-virtual forms; each expands the specialized macro above for each oop size.
coleenp@548 210
duke@435 211 #define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix) \
duke@435 212 \
duke@435 213 int instanceRefKlass:: \
duke@435 214 oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) { \
duke@435 215 /* Get size before changing pointers */ \
duke@435 216 SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
duke@435 217 \
duke@435 218 int size = instanceKlass::oop_oop_iterate##nv_suffix(obj, closure); \
duke@435 219 \
coleenp@548 220 if (UseCompressedOops) { \
coleenp@548 221 InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains); \
coleenp@548 222 } else { \
coleenp@548 223 InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains); \
duke@435 224 } \
duke@435 225 }
duke@435 226
ysr@777 227 #ifndef SERIALGC
ysr@777 228 #define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
ysr@777 229 \
ysr@777 230 int instanceRefKlass:: \
ysr@777 231 oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) { \
ysr@777 232 /* Get size before changing pointers */ \
ysr@777 233 SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
ysr@777 234 \
ysr@777 235 int size = instanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
ysr@777 236 \
ysr@777 237 if (UseCompressedOops) { \
ysr@777 238 InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains); \
ysr@777 239 } else { \
ysr@777 240 InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains); \
ysr@777 241 } \
ysr@777 242 }
ysr@777 243 #endif // !SERIALGC
ysr@777 244
ysr@777 245
duke@435 246 #define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix) \
duke@435 247 \
duke@435 248 int instanceRefKlass:: \
duke@435 249 oop_oop_iterate##nv_suffix##_m(oop obj, \
duke@435 250 OopClosureType* closure, \
duke@435 251 MemRegion mr) { \
duke@435 252 SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
duke@435 253 \
duke@435 254 int size = instanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr); \
coleenp@548 255 if (UseCompressedOops) { \
coleenp@548 256 InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
coleenp@548 257 } else { \
coleenp@548 258 InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains); \
duke@435 259 } \
duke@435 260 }
duke@435 261
duke@435 262 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ysr@777 263 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ysr@777 264 #ifndef SERIALGC
ysr@777 265 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ysr@777 266 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ysr@777 267 #endif // SERIALGC
duke@435 268 ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ysr@777 269 ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
duke@435 270
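// ParallelScavenge promotion: oop_copy_contents is the breadth-first path
// (claim_or_forward_breadth) and oop_push_contents the depth-first path
// (claim_or_forward_depth); the depth_first() asserts below check that the
// appropriate one is being used. Reference discovery follows the same
// pattern as in oop_follow_contents above.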
duke@435 271 #ifndef SERIALGC
coleenp@548 272 template <class T>
coleenp@548 273 void specialized_oop_copy_contents(instanceRefKlass *ref,
coleenp@548 274 PSPromotionManager* pm, oop obj) {
duke@435 275 assert(!pm->depth_first(), "invariant");
coleenp@548 276 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 277 if (PSScavenge::should_scavenge(referent_addr)) {
duke@435 278 ReferenceProcessor* rp = PSScavenge::reference_processor();
coleenp@548 279 if (rp->discover_reference(obj, ref->reference_type())) {
duke@435 280 // reference already enqueued, referent and next will be traversed later
coleenp@548 281 ref->instanceKlass::oop_copy_contents(pm, obj);
duke@435 282 return;
duke@435 283 } else {
duke@435 284 // treat referent as normal oop
duke@435 285 pm->claim_or_forward_breadth(referent_addr);
duke@435 286 }
duke@435 287 }
duke@435 288 // treat next as normal oop
coleenp@548 289 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 290 if (PSScavenge::should_scavenge(next_addr)) {
duke@435 291 pm->claim_or_forward_breadth(next_addr);
duke@435 292 }
coleenp@548 293 ref->instanceKlass::oop_copy_contents(pm, obj);
duke@435 294 }
duke@435 295
coleenp@548 296 void instanceRefKlass::oop_copy_contents(PSPromotionManager* pm, oop obj) {
coleenp@548 297 if (UseCompressedOops) {
coleenp@548 298 specialized_oop_copy_contents<narrowOop>(this, pm, obj);
coleenp@548 299 } else {
coleenp@548 300 specialized_oop_copy_contents<oop>(this, pm, obj);
coleenp@548 301 }
coleenp@548 302 }
coleenp@548 303
coleenp@548 304 template <class T>
coleenp@548 305 void specialized_oop_push_contents(instanceRefKlass *ref,
coleenp@548 306 PSPromotionManager* pm, oop obj) {
duke@435 307 assert(pm->depth_first(), "invariant");
coleenp@548 308 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 309 if (PSScavenge::should_scavenge(referent_addr)) {
duke@435 310 ReferenceProcessor* rp = PSScavenge::reference_processor();
coleenp@548 311 if (rp->discover_reference(obj, ref->reference_type())) {
duke@435 312 // reference already enqueued, referent and next will be traversed later
coleenp@548 313 ref->instanceKlass::oop_push_contents(pm, obj);
duke@435 314 return;
duke@435 315 } else {
duke@435 316 // treat referent as normal oop
duke@435 317 pm->claim_or_forward_depth(referent_addr);
duke@435 318 }
duke@435 319 }
duke@435 320 // treat next as normal oop
coleenp@548 321 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 322 if (PSScavenge::should_scavenge(next_addr)) {
duke@435 323 pm->claim_or_forward_depth(next_addr);
duke@435 324 }
coleenp@548 325 ref->instanceKlass::oop_push_contents(pm, obj);
coleenp@548 326 }
coleenp@548 327
coleenp@548 328 void instanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
coleenp@548 329 if (UseCompressedOops) {
coleenp@548 330 specialized_oop_push_contents<narrowOop>(this, pm, obj);
coleenp@548 331 } else {
coleenp@548 332 specialized_oop_push_contents<oop>(this, pm, obj);
coleenp@548 333 }
coleenp@548 334 }
coleenp@548 335
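// The two oop_update_pointers overloads below adjust the same three
// Reference-specific fields; the second form only updates pointers that lie
// in [beg_addr, end_addr), matching the bounded adjust_pointer calls used by
// PSParallelCompact.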
coleenp@548 336 template <class T>
coleenp@548 337 void specialized_oop_update_pointers(instanceRefKlass *ref,
coleenp@548 338 ParCompactionManager* cm, oop obj) {
coleenp@548 339 T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 340 PSParallelCompact::adjust_pointer(referent_addr);
coleenp@548 341 T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 342 PSParallelCompact::adjust_pointer(next_addr);
coleenp@548 343 T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
coleenp@548 344 PSParallelCompact::adjust_pointer(discovered_addr);
coleenp@548 345 debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
coleenp@548 346 referent_addr, next_addr, discovered_addr);)
duke@435 347 }
duke@435 348
duke@435 349 int instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
duke@435 350 instanceKlass::oop_update_pointers(cm, obj);
coleenp@548 351 if (UseCompressedOops) {
coleenp@548 352 specialized_oop_update_pointers<narrowOop>(this, cm, obj);
coleenp@548 353 } else {
coleenp@548 354 specialized_oop_update_pointers<oop>(this, cm, obj);
coleenp@548 355 }
coleenp@548 356 return size_helper();
coleenp@548 357 }
duke@435 358
duke@435 359
coleenp@548 360 template <class T> void
coleenp@548 361 specialized_oop_update_pointers(ParCompactionManager* cm, oop obj,
coleenp@548 362 HeapWord* beg_addr, HeapWord* end_addr) {
coleenp@548 363 T* p;
coleenp@548 364 T* referent_addr = p = (T*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 365 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
coleenp@548 366 T* next_addr = p = (T*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 367 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
coleenp@548 368 T* discovered_addr = p = (T*)java_lang_ref_Reference::discovered_addr(obj);
coleenp@548 369 PSParallelCompact::adjust_pointer(p, beg_addr, end_addr);
coleenp@548 370 debug_only(trace_reference_gc("instanceRefKlass::oop_update_ptrs", obj,
coleenp@548 371 referent_addr, next_addr, discovered_addr);)
duke@435 372 }
duke@435 373
duke@435 374 int
duke@435 375 instanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj,
duke@435 376 HeapWord* beg_addr, HeapWord* end_addr) {
duke@435 377 instanceKlass::oop_update_pointers(cm, obj, beg_addr, end_addr);
coleenp@548 378 if (UseCompressedOops) {
coleenp@548 379 specialized_oop_update_pointers<narrowOop>(cm, obj, beg_addr, end_addr);
coleenp@548 380 } else {
coleenp@548 381 specialized_oop_update_pointers<oop>(cm, obj, beg_addr, end_addr);
duke@435 382 }
duke@435 383 return size_helper();
duke@435 384 }
duke@435 385 #endif // SERIALGC
duke@435 386
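// Note on the UseSharedSpaces branch below: for shared classes the oop map is
// expected to already be in its rewritten (queue_offset, 1) form, presumably
// having been rewritten when the shared archive was created, so it is only
// verified rather than updated here.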
duke@435 387 void instanceRefKlass::update_nonstatic_oop_maps(klassOop k) {
duke@435 388 // Clear the nonstatic oop-map entries corresponding to referent
duke@435 389 // and nextPending field. They are treated specially by the
duke@435 390 // garbage collector.
duke@435 391 // The discovered field is used only by the garbage collector
duke@435 392 // and is also treated specially.
duke@435 393 instanceKlass* ik = instanceKlass::cast(k);
duke@435 394
duke@435 395 // Check that we have the right class
duke@435 396 debug_only(static bool first_time = true);
duke@435 397 assert(k == SystemDictionary::reference_klass() && first_time,
duke@435 398 "Invalid update of maps");
duke@435 399 debug_only(first_time = false);
duke@435 400 assert(ik->nonstatic_oop_map_size() == 1, "just checking");
duke@435 401
duke@435 402 OopMapBlock* map = ik->start_of_nonstatic_oop_maps();
duke@435 403
duke@435 404 // Check that the current map is (2,4) - currently points at field with
duke@435 405 // offset 2 (words) and has 4 map entries.
duke@435 406 debug_only(int offset = java_lang_ref_Reference::referent_offset);
duke@435 407 debug_only(int length = ((java_lang_ref_Reference::discovered_offset -
coleenp@548 408 java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);
duke@435 409
duke@435 410 if (UseSharedSpaces) {
duke@435 411 assert(map->offset() == java_lang_ref_Reference::queue_offset &&
duke@435 412 map->length() == 1, "just checking");
duke@435 413 } else {
duke@435 414 assert(map->offset() == offset && map->length() == length,
duke@435 415 "just checking");
duke@435 416
duke@435 417 // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
duke@435 418 map->set_offset(java_lang_ref_Reference::queue_offset);
duke@435 419 map->set_length(1);
duke@435 420 }
duke@435 421 }
duke@435 422
duke@435 423
duke@435 424 // Verification
duke@435 425
duke@435 426 void instanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
duke@435 427 instanceKlass::oop_verify_on(obj, st);
duke@435 428 // Verify referent field
duke@435 429 oop referent = java_lang_ref_Reference::referent(obj);
duke@435 430
duke@435 431 // We should make this general to all heaps
duke@435 432 GenCollectedHeap* gch = NULL;
duke@435 433 if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
duke@435 434 gch = GenCollectedHeap::heap();
duke@435 435
duke@435 436 if (referent != NULL) {
duke@435 437 guarantee(referent->is_oop(), "referent field verify failed");
coleenp@548 438 if (gch != NULL && !gch->is_in_youngest(obj)) {
duke@435 439 // We do a specific remembered set check here since the referent
duke@435 440 // field is not part of the oop mask and therefore skipped by the
duke@435 441 // regular verify code.
coleenp@548 442 if (UseCompressedOops) {
coleenp@548 443 narrowOop* referent_addr = (narrowOop*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 444 obj->verify_old_oop(referent_addr, true);
coleenp@548 445 } else {
coleenp@548 446 oop* referent_addr = (oop*)java_lang_ref_Reference::referent_addr(obj);
coleenp@548 447 obj->verify_old_oop(referent_addr, true);
coleenp@548 448 }
coleenp@548 449 }
duke@435 450 }
duke@435 451 // Verify next field
duke@435 452 oop next = java_lang_ref_Reference::next(obj);
duke@435 453 if (next != NULL) {
ysr@777 454 guarantee(next->is_oop(), "next field verify failed");
duke@435 455 guarantee(next->is_instanceRef(), "next field verify failed");
duke@435 456 if (gch != NULL && !gch->is_in_youngest(obj)) {
duke@435 457 // We do a specific remembered set check here since the next field is
duke@435 458 // not part of the oop mask and therefore skipped by the regular
duke@435 459 // verify code.
coleenp@548 460 if (UseCompressedOops) {
coleenp@548 461 narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 462 obj->verify_old_oop(next_addr, true);
coleenp@548 463 } else {
coleenp@548 464 oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 465 obj->verify_old_oop(next_addr, true);
coleenp@548 466 }
duke@435 467 }
duke@435 468 }
duke@435 469 }
duke@435 470
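// Pending-list lock helpers. Both run under PRESERVE_EXCEPTION_MARK so that
// an exception already pending in the calling thread survives the
// synchronization; release_and_notify_pending_list_lock also notifies waiters
// on the lock when the pending list is non-empty before releasing it.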
duke@435 471 void instanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
duke@435 472 // we may enter this with pending exception set
duke@435 473 PRESERVE_EXCEPTION_MARK; // exceptions are never thrown, needed for TRAPS argument
duke@435 474 Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
duke@435 475 ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
duke@435 476 assert(ObjectSynchronizer::current_thread_holds_lock(
duke@435 477 JavaThread::current(), h_lock),
duke@435 478 "Locking should have succeeded");
duke@435 479 if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
duke@435 480 }
duke@435 481
duke@435 482 void instanceRefKlass::release_and_notify_pending_list_lock(
duke@435 483 BasicLock *pending_list_basic_lock) {
duke@435 484 // we may enter this with pending exception set
duke@435 485 PRESERVE_EXCEPTION_MARK; // exceptions are never thrown, needed for TRAPS argument
duke@435 486 //
duke@435 487 Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
duke@435 488 assert(ObjectSynchronizer::current_thread_holds_lock(
duke@435 489 JavaThread::current(), h_lock),
duke@435 490 "Lock should be held");
duke@435 491 // Notify waiters on pending lists lock if there is any reference.
duke@435 492 if (java_lang_ref_Reference::pending_list() != NULL) {
duke@435 493 ObjectSynchronizer::notifyall(h_lock, THREAD);
duke@435 494 }
duke@435 495 ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
duke@435 496 if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
duke@435 497 }
