src/share/vm/oops/instanceRefKlass.cpp

changeset 0:f90c822e73f8
child     6876:710a3c8b516e

     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/src/share/vm/oops/instanceRefKlass.cpp	Wed Apr 27 01:25:04 2016 +0800
     1.3 @@ -0,0 +1,555 @@
     1.4 +/*
     1.5 + * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
     1.6 + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     1.7 + *
     1.8 + * This code is free software; you can redistribute it and/or modify it
     1.9 + * under the terms of the GNU General Public License version 2 only, as
    1.10 + * published by the Free Software Foundation.
    1.11 + *
    1.12 + * This code is distributed in the hope that it will be useful, but WITHOUT
    1.13 + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    1.14 + * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    1.15 + * version 2 for more details (a copy is included in the LICENSE file that
    1.16 + * accompanied this code).
    1.17 + *
    1.18 + * You should have received a copy of the GNU General Public License version
    1.19 + * 2 along with this work; if not, write to the Free Software Foundation,
    1.20 + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    1.21 + *
    1.22 + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
    1.23 + * or visit www.oracle.com if you need additional information or have any
    1.24 + * questions.
    1.25 + *
    1.26 + */
    1.27 +
    1.28 +#include "precompiled.hpp"
    1.29 +#include "classfile/javaClasses.hpp"
    1.30 +#include "classfile/systemDictionary.hpp"
    1.31 +#include "gc_implementation/shared/markSweep.inline.hpp"
    1.32 +#include "gc_interface/collectedHeap.hpp"
    1.33 +#include "gc_interface/collectedHeap.inline.hpp"
    1.34 +#include "memory/genCollectedHeap.hpp"
    1.35 +#include "memory/genOopClosures.inline.hpp"
    1.36 +#include "oops/instanceRefKlass.hpp"
    1.37 +#include "oops/oop.inline.hpp"
    1.38 +#include "utilities/preserveException.hpp"
    1.39 +#include "utilities/macros.hpp"
    1.40 +#if INCLUDE_ALL_GCS
    1.41 +#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
    1.42 +#include "gc_implementation/g1/g1OopClosures.inline.hpp"
    1.43 +#include "gc_implementation/g1/g1RemSet.inline.hpp"
    1.44 +#include "gc_implementation/g1/heapRegionSeq.inline.hpp"
    1.45 +#include "gc_implementation/parNew/parOopClosures.inline.hpp"
    1.46 +#include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
    1.47 +#include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
    1.48 +#include "oops/oop.pcgc.inline.hpp"
    1.49 +#endif // INCLUDE_ALL_GCS
    1.50 +
    1.51 +PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
    1.52 +
    1.53 +template <class T>
    1.54 +void specialized_oop_follow_contents(InstanceRefKlass* ref, oop obj) {
    1.55 +  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
    1.56 +  T heap_oop = oopDesc::load_heap_oop(referent_addr);
    1.57 +  debug_only(
    1.58 +    if(TraceReferenceGC && PrintGCDetails) {
    1.59 +      gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj);
    1.60 +    }
    1.61 +  )
    1.62 +  if (!oopDesc::is_null(heap_oop)) {
    1.63 +    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    1.64 +    if (!referent->is_gc_marked() &&
    1.65 +        MarkSweep::ref_processor()->discover_reference(obj, ref->reference_type())) {
    1.66 +      // reference was discovered, referent will be traversed later
    1.67 +      ref->InstanceKlass::oop_follow_contents(obj);
    1.68 +      debug_only(
    1.69 +        if(TraceReferenceGC && PrintGCDetails) {
    1.70 +          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, (void *)obj);
    1.71 +        }
    1.72 +      )
    1.73 +      return;
    1.74 +    } else {
    1.75 +      // treat referent as normal oop
    1.76 +      debug_only(
    1.77 +        if(TraceReferenceGC && PrintGCDetails) {
    1.78 +          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, (void *)obj);
    1.79 +        }
    1.80 +      )
    1.81 +      MarkSweep::mark_and_push(referent_addr);
    1.82 +    }
    1.83 +  }
    1.84 +  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
    1.85 +  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    1.86 +    // Treat discovered as normal oop, if ref is not "active",
    1.87 +    // i.e. if next is non-NULL.
    1.88 +    T  next_oop = oopDesc::load_heap_oop(next_addr);
    1.89 +    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
    1.90 +      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
    1.91 +      debug_only(
    1.92 +        if(TraceReferenceGC && PrintGCDetails) {
    1.93 +          gclog_or_tty->print_cr("   Process discovered as normal "
    1.94 +                                 INTPTR_FORMAT, discovered_addr);
    1.95 +        }
    1.96 +      )
    1.97 +      MarkSweep::mark_and_push(discovered_addr);
    1.98 +    }
    1.99 +  } else {
   1.100 +#ifdef ASSERT
   1.101 +    // In the case of older JDKs which do not use the discovered
   1.102 +    // field for the pending list, an inactive ref (next != NULL)
   1.103 +    // must always have a NULL discovered field.
   1.104 +    oop next = oopDesc::load_decode_heap_oop(next_addr);
   1.105 +    oop discovered = java_lang_ref_Reference::discovered(obj);
   1.106 +    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
   1.107 +           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
   1.108 +                   (oopDesc*)obj));
   1.109 +#endif
   1.110 +  }
   1.111 +  // treat next as normal oop.  next is a link in the reference queue.
   1.112 +  debug_only(
   1.113 +    if(TraceReferenceGC && PrintGCDetails) {
   1.114 +      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
   1.115 +    }
   1.116 +  )
   1.117 +  MarkSweep::mark_and_push(next_addr);
   1.118 +  ref->InstanceKlass::oop_follow_contents(obj);
   1.119 +}
   1.120 +
   1.121 +void InstanceRefKlass::oop_follow_contents(oop obj) {
   1.122 +  if (UseCompressedOops) {
   1.123 +    specialized_oop_follow_contents<narrowOop>(this, obj);
   1.124 +  } else {
   1.125 +    specialized_oop_follow_contents<oop>(this, obj);
   1.126 +  }
   1.127 +}
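
The entry point above is only a dispatcher: with -XX:+UseCompressedOops the heap stores
Reference fields as 32-bit narrowOops, otherwise as full-width oops, so the work is
delegated to the template specialized on the slot type. A minimal sketch of the pattern
(helper names are hypothetical; only the load/decode calls mirror the ones used above):

    // Sketch only: the oop/narrowOop dispatch pattern used throughout this file.
    // 'visit_example_slot' and 'example_slot_addr' are hypothetical names.
    template <class T>                        // T = oop or narrowOop
    void visit_example_slot(oop obj) {
      T* slot = (T*)example_slot_addr(obj);   // hypothetical field-address accessor
      T raw = oopDesc::load_heap_oop(slot);   // raw, possibly compressed value
      if (!oopDesc::is_null(raw)) {
        oop o = oopDesc::decode_heap_oop_not_null(raw);  // no-op for plain oops
        // ... mark / push / trace o ...
      }
    }

    void visit_example(oop obj) {
      if (UseCompressedOops) {
        visit_example_slot<narrowOop>(obj);   // 32-bit compressed slots
      } else {
        visit_example_slot<oop>(obj);         // full-width slots
      }
    }
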
   1.128 +
   1.129 +#if INCLUDE_ALL_GCS
   1.130 +template <class T>
   1.131 +void specialized_oop_follow_contents(InstanceRefKlass* ref,
   1.132 +                                     ParCompactionManager* cm,
   1.133 +                                     oop obj) {
   1.134 +  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
   1.135 +  T heap_oop = oopDesc::load_heap_oop(referent_addr);
   1.136 +  debug_only(
   1.137 +    if(TraceReferenceGC && PrintGCDetails) {
   1.138 +      gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj);
   1.139 +    }
   1.140 +  )
   1.141 +  if (!oopDesc::is_null(heap_oop)) {
   1.142 +    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
   1.143 +    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
   1.144 +        PSParallelCompact::ref_processor()->
   1.145 +          discover_reference(obj, ref->reference_type())) {
   1.146 +      // reference already enqueued, referent will be traversed later
   1.147 +      ref->InstanceKlass::oop_follow_contents(cm, obj);
   1.148 +      debug_only(
   1.149 +        if(TraceReferenceGC && PrintGCDetails) {
   1.150 +          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, (void *)obj);
   1.151 +        }
   1.152 +      )
   1.153 +      return;
   1.154 +    } else {
   1.155 +      // treat referent as normal oop
   1.156 +      debug_only(
   1.157 +        if(TraceReferenceGC && PrintGCDetails) {
   1.158 +          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, (void *)obj);
   1.159 +        }
   1.160 +      )
   1.161 +      PSParallelCompact::mark_and_push(cm, referent_addr);
   1.162 +    }
   1.163 +  }
   1.164 +  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
   1.165 +  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
   1.166 +    // Treat discovered as normal oop, if ref is not "active",
   1.167 +    // i.e. if next is non-NULL.
   1.168 +    T  next_oop = oopDesc::load_heap_oop(next_addr);
   1.169 +    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
   1.170 +      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
   1.171 +      debug_only(
   1.172 +        if(TraceReferenceGC && PrintGCDetails) {
   1.173 +          gclog_or_tty->print_cr("   Process discovered as normal "
   1.174 +                                 INTPTR_FORMAT, discovered_addr);
   1.175 +        }
   1.176 +      )
   1.177 +      PSParallelCompact::mark_and_push(cm, discovered_addr);
   1.178 +    }
   1.179 +  } else {
   1.180 +#ifdef ASSERT
   1.181 +    // In the case of older JDKs which do not use the discovered
   1.182 +    // field for the pending list, an inactive ref (next != NULL)
   1.183 +    // must always have a NULL discovered field.
   1.184 +    T next = oopDesc::load_heap_oop(next_addr);
   1.185 +    oop discovered = java_lang_ref_Reference::discovered(obj);
   1.186 +    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
   1.187 +           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
   1.188 +                   (oopDesc*)obj));
   1.189 +#endif
   1.190 +  }
   1.191 +  PSParallelCompact::mark_and_push(cm, next_addr);
   1.192 +  ref->InstanceKlass::oop_follow_contents(cm, obj);
   1.193 +}
   1.194 +
   1.195 +void InstanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
   1.196 +                                           oop obj) {
   1.197 +  if (UseCompressedOops) {
   1.198 +    specialized_oop_follow_contents<narrowOop>(this, cm, obj);
   1.199 +  } else {
   1.200 +    specialized_oop_follow_contents<oop>(this, cm, obj);
   1.201 +  }
   1.202 +}
   1.203 +#endif // INCLUDE_ALL_GCS
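
Both follow_contents specializations rely on the same java.lang.ref.Reference state
convention spelled out in the comments above: a Reference is "active" while its next
field is NULL; once it is pending or enqueued, next becomes non-NULL, and only then may
the discovered field hold an ordinary heap pointer (the pending-list link) that must be
visited like any other oop. A hedged sketch of that test, with a hypothetical helper
name:

    // Sketch only: 'reference_is_active' is a hypothetical helper; the code above
    // performs this check inline on next_addr.
    template <class T>
    bool reference_is_active(oop obj) {
      T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
      // active  <=>  next == NULL; pending/enqueued references have next != NULL
      return oopDesc::is_null(oopDesc::load_heap_oop(next_addr));
    }
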
   1.204 +
   1.205 +#ifdef ASSERT
   1.206 +template <class T> void trace_reference_gc(const char *s, oop obj,
   1.207 +                                           T* referent_addr,
   1.208 +                                           T* next_addr,
   1.209 +                                           T* discovered_addr) {
   1.210 +  if(TraceReferenceGC && PrintGCDetails) {
   1.211 +    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
   1.212 +    gclog_or_tty->print_cr("     referent_addr/* " INTPTR_FORMAT " / "
   1.213 +         INTPTR_FORMAT, referent_addr,
   1.214 +         referent_addr ?
   1.215 +           (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
   1.216 +    gclog_or_tty->print_cr("     next_addr/* " INTPTR_FORMAT " / "
   1.217 +         INTPTR_FORMAT, next_addr,
   1.218 +         next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
   1.219 +    gclog_or_tty->print_cr("     discovered_addr/* " INTPTR_FORMAT " / "
   1.220 +         INTPTR_FORMAT, discovered_addr,
   1.221 +         discovered_addr ?
   1.222 +           (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
   1.223 +  }
   1.224 +}
   1.225 +#endif
   1.226 +
   1.227 +template <class T> void specialized_oop_adjust_pointers(InstanceRefKlass *ref, oop obj) {
   1.228 +  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
   1.229 +  MarkSweep::adjust_pointer(referent_addr);
   1.230 +  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
   1.231 +  MarkSweep::adjust_pointer(next_addr);
   1.232 +  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
   1.233 +  MarkSweep::adjust_pointer(discovered_addr);
   1.234 +  debug_only(trace_reference_gc("InstanceRefKlass::oop_adjust_pointers", obj,
   1.235 +                                referent_addr, next_addr, discovered_addr);)
   1.236 +}
   1.237 +
   1.238 +int InstanceRefKlass::oop_adjust_pointers(oop obj) {
   1.239 +  int size = size_helper();
   1.240 +  InstanceKlass::oop_adjust_pointers(obj);
   1.241 +
   1.242 +  if (UseCompressedOops) {
   1.243 +    specialized_oop_adjust_pointers<narrowOop>(this, obj);
   1.244 +  } else {
   1.245 +    specialized_oop_adjust_pointers<oop>(this, obj);
   1.246 +  }
   1.247 +  return size;
   1.248 +}
   1.249 +
   1.250 +#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)        \
   1.251 +  T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);             \
   1.252 +  if (closure->apply_to_weak_ref_discovered_field()) {                          \
   1.253 +    closure->do_oop##nv_suffix(disc_addr);                                      \
   1.254 +  }                                                                             \
   1.255 +                                                                                \
   1.256 +  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);           \
   1.257 +  T heap_oop = oopDesc::load_heap_oop(referent_addr);                           \
   1.258 +  ReferenceProcessor* rp = closure->_ref_processor;                             \
   1.259 +  if (!oopDesc::is_null(heap_oop)) {                                            \
   1.260 +    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);                 \
   1.261 +    if (!referent->is_gc_marked() && (rp != NULL) &&                            \
   1.262 +        rp->discover_reference(obj, reference_type())) {                        \
   1.263 +      return size;                                                              \
   1.264 +    } else if (contains(referent_addr)) {                                       \
   1.265 +      /* treat referent as normal oop */                                        \
   1.266 +      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
   1.267 +      closure->do_oop##nv_suffix(referent_addr);                                \
   1.268 +    }                                                                           \
   1.269 +  }                                                                             \
   1.270 +  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                   \
   1.271 +  if (ReferenceProcessor::pending_list_uses_discovered_field()) {               \
   1.272 +    T next_oop  = oopDesc::load_heap_oop(next_addr);                            \
   1.273 +    /* Treat discovered as normal oop, if ref is not "active" (next non-NULL) */\
   1.274 +    if (!oopDesc::is_null(next_oop) && contains(disc_addr)) {                   \
   1.275 +        /* i.e. ref is not "active" */                                          \
   1.276 +      debug_only(                                                               \
   1.277 +        if(TraceReferenceGC && PrintGCDetails) {                                \
   1.278 +          gclog_or_tty->print_cr("   Process discovered as normal "             \
   1.279 +                                 INTPTR_FORMAT, disc_addr);                     \
   1.280 +        }                                                                       \
   1.281 +      )                                                                         \
   1.282 +      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
   1.283 +      closure->do_oop##nv_suffix(disc_addr);                                    \
   1.284 +    }                                                                           \
   1.285 +  } else {                                                                      \
   1.286 +    /* In the case of older JDKs which do not use the discovered field for  */  \
   1.287 +    /* the pending list, an inactive ref (next != NULL) must always have a  */  \
   1.288 +    /* NULL discovered field. */                                                \
   1.289 +    debug_only(                                                                 \
   1.290 +      T next_oop = oopDesc::load_heap_oop(next_addr);                           \
   1.291 +      T disc_oop = oopDesc::load_heap_oop(disc_addr);                           \
   1.292 +      assert(oopDesc::is_null(next_oop) || oopDesc::is_null(disc_oop),          \
    1.293 +           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL " \
    1.294 +                   "discovered field", (oopDesc*)obj));                          \
   1.295 +    )                                                                           \
   1.296 +  }                                                                             \
   1.297 +  /* treat next as normal oop */                                                \
   1.298 +  if (contains(next_addr)) {                                                    \
   1.299 +    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
   1.300 +    closure->do_oop##nv_suffix(next_addr);                                      \
   1.301 +  }                                                                             \
   1.302 +  return size;                                                                  \
   1.303 +
   1.304 +
   1.305 +template <class T> bool contains(T *t) { return true; }
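
The always-true contains above is the "unbounded" stand-in for MemRegion::contains: the
iterate macro takes a contains expression as its third parameter, so one macro body
serves both the whole-object walk and the MemRegion-bounded _m walk defined further
down. Illustration only, showing the textual substitution:

    //   contains(next_addr)   ->  contains(next_addr)      // free template, always true
    //   contains(next_addr)   ->  mr.contains(next_addr)   // in the ..._m variant
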
   1.306 +
   1.307 +// Macro to define InstanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
   1.308 +// all closures.  Macros calling macros above for each oop size.
   1.309 +
   1.310 +#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
   1.311 +                                                                                \
   1.312 +int InstanceRefKlass::                                                          \
   1.313 +oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
   1.314 +  /* Get size before changing pointers */                                       \
   1.315 +  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
   1.316 +                                                                                \
   1.317 +  int size = InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
   1.318 +                                                                                \
   1.319 +  if (UseCompressedOops) {                                                      \
   1.320 +    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
   1.321 +  } else {                                                                      \
   1.322 +    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
   1.323 +  }                                                                             \
   1.324 +}
   1.325 +
   1.326 +#if INCLUDE_ALL_GCS
   1.327 +#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
   1.328 +                                                                                \
   1.329 +int InstanceRefKlass::                                                          \
   1.330 +oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {        \
   1.331 +  /* Get size before changing pointers */                                       \
   1.332 +  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
   1.333 +                                                                                \
   1.334 +  int size = InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
   1.335 +                                                                                \
   1.336 +  if (UseCompressedOops) {                                                      \
   1.337 +    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
   1.338 +  } else {                                                                      \
   1.339 +    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
   1.340 +  }                                                                             \
   1.341 +}
   1.342 +#endif // INCLUDE_ALL_GCS
   1.343 +
   1.344 +
   1.345 +#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
   1.346 +                                                                                \
   1.347 +int InstanceRefKlass::                                                          \
   1.348 +oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
   1.349 +                               OopClosureType* closure,                         \
   1.350 +                               MemRegion mr) {                                  \
   1.351 +  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
   1.352 +                                                                                \
   1.353 +  int size = InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
   1.354 +  if (UseCompressedOops) {                                                      \
   1.355 +    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
   1.356 +  } else {                                                                      \
   1.357 +    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
   1.358 +  }                                                                             \
   1.359 +}
   1.360 +
   1.361 +ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
   1.362 +ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
   1.363 +#if INCLUDE_ALL_GCS
   1.364 +ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
   1.365 +ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
   1.366 +#endif // INCLUDE_ALL_GCS
   1.367 +ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
   1.368 +ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
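
Each macro application above stamps out one oop_oop_iterate body per closure type, in
virtual and non-virtual (_nv) flavours. A rough sketch of what a single expansion looks
like, for a hypothetical specialized closure type MyClosure and with the
SpecializationStats bookkeeping omitted:

    // Sketch only: the real bodies are generated by the macro applications above.
    int InstanceRefKlass::oop_oop_iterate_nv(oop obj, MyClosure* closure) {
      // Non-Reference instance fields first.
      int size = InstanceKlass::oop_oop_iterate_nv(obj, closure);
      if (UseCompressedOops) {
        // InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, _nv, contains):
        // visits discovered, referent and next as narrowOop* slots, possibly
        // handing the reference to closure->_ref_processor instead, and ...
        return size;                          // ... ends with 'return size;'
      } else {
        // InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, _nv, contains): same,
        // with full-width oop* slots.
        return size;
      }
    }
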
   1.369 +
   1.370 +#if INCLUDE_ALL_GCS
   1.371 +template <class T>
   1.372 +void specialized_oop_push_contents(InstanceRefKlass *ref,
   1.373 +                                   PSPromotionManager* pm, oop obj) {
   1.374 +  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
   1.375 +  if (PSScavenge::should_scavenge(referent_addr)) {
   1.376 +    ReferenceProcessor* rp = PSScavenge::reference_processor();
   1.377 +    if (rp->discover_reference(obj, ref->reference_type())) {
   1.378 +      // reference already enqueued, referent and next will be traversed later
   1.379 +      ref->InstanceKlass::oop_push_contents(pm, obj);
   1.380 +      return;
   1.381 +    } else {
   1.382 +      // treat referent as normal oop
   1.383 +      pm->claim_or_forward_depth(referent_addr);
   1.384 +    }
   1.385 +  }
   1.386 +  // Treat discovered as normal oop, if ref is not "active",
   1.387 +  // i.e. if next is non-NULL.
   1.388 +  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
   1.389 +  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
   1.390 +    T  next_oop = oopDesc::load_heap_oop(next_addr);
   1.391 +    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
   1.392 +      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
   1.393 +      debug_only(
   1.394 +        if(TraceReferenceGC && PrintGCDetails) {
   1.395 +          gclog_or_tty->print_cr("   Process discovered as normal "
   1.396 +                                 INTPTR_FORMAT, discovered_addr);
   1.397 +        }
   1.398 +      )
   1.399 +      if (PSScavenge::should_scavenge(discovered_addr)) {
   1.400 +        pm->claim_or_forward_depth(discovered_addr);
   1.401 +      }
   1.402 +    }
   1.403 +  } else {
   1.404 +#ifdef ASSERT
   1.405 +    // In the case of older JDKs which do not use the discovered
   1.406 +    // field for the pending list, an inactive ref (next != NULL)
   1.407 +    // must always have a NULL discovered field.
   1.408 +    oop next = oopDesc::load_decode_heap_oop(next_addr);
   1.409 +    oop discovered = java_lang_ref_Reference::discovered(obj);
   1.410 +    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
   1.411 +           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
   1.412 +                   (oopDesc*)obj));
   1.413 +#endif
   1.414 +  }
   1.415 +
   1.416 +  // Treat next as normal oop;  next is a link in the reference queue.
   1.417 +  if (PSScavenge::should_scavenge(next_addr)) {
   1.418 +    pm->claim_or_forward_depth(next_addr);
   1.419 +  }
   1.420 +  ref->InstanceKlass::oop_push_contents(pm, obj);
   1.421 +}
   1.422 +
   1.423 +void InstanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
   1.424 +  if (UseCompressedOops) {
   1.425 +    specialized_oop_push_contents<narrowOop>(this, pm, obj);
   1.426 +  } else {
   1.427 +    specialized_oop_push_contents<oop>(this, pm, obj);
   1.428 +  }
   1.429 +}
   1.430 +
   1.431 +template <class T>
   1.432 +void specialized_oop_update_pointers(InstanceRefKlass *ref,
   1.433 +                                    ParCompactionManager* cm, oop obj) {
   1.434 +  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
   1.435 +  PSParallelCompact::adjust_pointer(referent_addr);
   1.436 +  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
   1.437 +  PSParallelCompact::adjust_pointer(next_addr);
   1.438 +  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
   1.439 +  PSParallelCompact::adjust_pointer(discovered_addr);
   1.440 +  debug_only(trace_reference_gc("InstanceRefKlass::oop_update_ptrs", obj,
   1.441 +                                referent_addr, next_addr, discovered_addr);)
   1.442 +}
   1.443 +
   1.444 +int InstanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
   1.445 +  InstanceKlass::oop_update_pointers(cm, obj);
   1.446 +  if (UseCompressedOops) {
   1.447 +    specialized_oop_update_pointers<narrowOop>(this, cm, obj);
   1.448 +  } else {
   1.449 +    specialized_oop_update_pointers<oop>(this, cm, obj);
   1.450 +  }
   1.451 +  return size_helper();
   1.452 +}
   1.453 +#endif // INCLUDE_ALL_GCS
   1.454 +
   1.455 +void InstanceRefKlass::update_nonstatic_oop_maps(Klass* k) {
   1.456 +  // Clear the nonstatic oop-map entries corresponding to referent
   1.457 +  // and nextPending field.  They are treated specially by the
   1.458 +  // garbage collector.
   1.459 +  // The discovered field is used only by the garbage collector
   1.460 +  // and is also treated specially.
   1.461 +  InstanceKlass* ik = InstanceKlass::cast(k);
   1.462 +
   1.463 +  // Check that we have the right class
   1.464 +  debug_only(static bool first_time = true);
   1.465 +  assert(k == SystemDictionary::Reference_klass() && first_time,
   1.466 +         "Invalid update of maps");
   1.467 +  debug_only(first_time = false);
   1.468 +  assert(ik->nonstatic_oop_map_count() == 1, "just checking");
   1.469 +
   1.470 +  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();
   1.471 +
   1.472 +  // Check that the current map is (2,4) - currently points at field with
   1.473 +  // offset 2 (words) and has 4 map entries.
   1.474 +  debug_only(int offset = java_lang_ref_Reference::referent_offset);
   1.475 +  debug_only(unsigned int count = ((java_lang_ref_Reference::discovered_offset -
   1.476 +    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);
   1.477 +
   1.478 +  if (UseSharedSpaces) {
   1.479 +    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
   1.480 +           map->count() == 1, "just checking");
   1.481 +  } else {
   1.482 +    assert(map->offset() == offset && map->count() == count,
   1.483 +           "just checking");
   1.484 +
   1.485 +    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
   1.486 +    map->set_offset(java_lang_ref_Reference::queue_offset);
   1.487 +    map->set_count(1);
   1.488 +  }
   1.489 +}
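
update_nonstatic_oop_maps depends on the field order that javaClasses.cpp computes for
java.lang.ref.Reference: referent, queue, next and discovered occupy consecutive oop
slots, and the class file parser initially covers all four with a single oop-map entry.
The code above narrows that entry so the generic iterators only see queue, leaving the
other three fields to the reference-handling code in this file. A symbolic illustration
(real word offsets come from javaClasses.cpp):

    //   Reference oop slots:  referent | queue | next | discovered
    //
    //   before: OopMapBlock { offset: referent_offset, count: 4 }  // covers all four
    //   after:  OopMapBlock { offset: queue_offset,    count: 1 }  // covers only queue
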
   1.490 +
   1.491 +
   1.492 +// Verification
   1.493 +
   1.494 +void InstanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
   1.495 +  InstanceKlass::oop_verify_on(obj, st);
   1.496 +  // Verify referent field
   1.497 +  oop referent = java_lang_ref_Reference::referent(obj);
   1.498 +
   1.499 +  // We should make this general to all heaps
   1.500 +  GenCollectedHeap* gch = NULL;
   1.501 +  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
   1.502 +    gch = GenCollectedHeap::heap();
   1.503 +
   1.504 +  if (referent != NULL) {
    1.505 +    guarantee(referent->is_oop(), "referent field verify failed");
   1.506 +  }
   1.507 +  // Verify next field
   1.508 +  oop next = java_lang_ref_Reference::next(obj);
   1.509 +  if (next != NULL) {
   1.510 +    guarantee(next->is_oop(), "next field verify failed");
   1.511 +    guarantee(next->is_instanceRef(), "next field verify failed");
   1.512 +  }
   1.513 +}
   1.514 +
   1.515 +bool InstanceRefKlass::owns_pending_list_lock(JavaThread* thread) {
   1.516 +  if (java_lang_ref_Reference::pending_list_lock() == NULL) return false;
   1.517 +  Handle h_lock(thread, java_lang_ref_Reference::pending_list_lock());
   1.518 +  return ObjectSynchronizer::current_thread_holds_lock(thread, h_lock);
   1.519 +}
   1.520 +
   1.521 +void InstanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
   1.522 +  // we may enter this with pending exception set
   1.523 +  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
   1.524 +
   1.525 +  // Create a HandleMark in case we retry a GC multiple times.
   1.526 +  // Each time we attempt the GC, we allocate the handle below
   1.527 +  // to hold the pending list lock. We want to free this handle.
   1.528 +  HandleMark hm;
   1.529 +
   1.530 +  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
   1.531 +  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
   1.532 +  assert(ObjectSynchronizer::current_thread_holds_lock(
   1.533 +           JavaThread::current(), h_lock),
   1.534 +         "Locking should have succeeded");
   1.535 +  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
   1.536 +}
   1.537 +
   1.538 +void InstanceRefKlass::release_and_notify_pending_list_lock(
   1.539 +  BasicLock *pending_list_basic_lock) {
   1.540 +  // we may enter this with pending exception set
   1.541 +  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument
   1.542 +
   1.543 +  // Create a HandleMark in case we retry a GC multiple times.
   1.544 +  // Each time we attempt the GC, we allocate the handle below
   1.545 +  // to hold the pending list lock. We want to free this handle.
   1.546 +  HandleMark hm;
   1.547 +
   1.548 +  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
   1.549 +  assert(ObjectSynchronizer::current_thread_holds_lock(
   1.550 +           JavaThread::current(), h_lock),
   1.551 +         "Lock should be held");
   1.552 +  // Notify waiters on pending lists lock if there is any reference.
   1.553 +  if (java_lang_ref_Reference::pending_list() != NULL) {
   1.554 +    ObjectSynchronizer::notifyall(h_lock, THREAD);
   1.555 +  }
   1.556 +  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
   1.557 +  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
   1.558 +}
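
acquire_pending_list_lock and release_and_notify_pending_list_lock are meant to be used
as a pair around a collection that may add references to the pending list; roughly, the
GC VM operations acquire the java.lang.ref.Reference lock object in their prologue and
release-and-notify it in their epilogue so the ReferenceHandler thread wakes up. A
hedged sketch of the pairing, with a hypothetical caller and the VM-operation plumbing
elided:

    // Sketch only: hypothetical caller showing how the two entry points pair up.
    void hypothetical_collect_with_pending_list_lock() {
      BasicLock pending_list_basic_lock;     // lock record handed to both calls
      InstanceRefKlass::acquire_pending_list_lock(&pending_list_basic_lock);
      // ... run the collection; it may append References to the pending list ...
      InstanceRefKlass::release_and_notify_pending_list_lock(&pending_list_basic_lock);
    }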
