src/share/vm/oops/instanceRefKlass.cpp

author       hseigel
date         Thu, 26 Sep 2013 10:25:02 -0400
changeset    5784:190899198332
parent       4542:db9981fd3124
child        6680:78bbf4d43a14
permissions  -rw-r--r--

7195622: CheckUnhandledOops has limited usefulness now
Summary: Enable CHECK_UNHANDLED_OOPS in fastdebug builds across all supported platforms.
Reviewed-by: coleenp, hseigel, dholmes, stefank, twisti, ihse, rdurbin
Contributed-by: lois.foltan@oracle.com

/*
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/systemDictionary.hpp"
#include "gc_implementation/shared/markSweep.inline.hpp"
#include "gc_interface/collectedHeap.hpp"
#include "gc_interface/collectedHeap.inline.hpp"
#include "memory/genCollectedHeap.hpp"
#include "memory/genOopClosures.inline.hpp"
#include "oops/instanceRefKlass.hpp"
#include "oops/oop.inline.hpp"
#include "utilities/preserveException.hpp"
#include "utilities/macros.hpp"
#if INCLUDE_ALL_GCS
#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
#include "gc_implementation/g1/g1OopClosures.inline.hpp"
#include "gc_implementation/g1/g1RemSet.inline.hpp"
#include "gc_implementation/g1/heapRegionSeq.inline.hpp"
#include "gc_implementation/parNew/parOopClosures.inline.hpp"
#include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
#include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
#include "oops/oop.pcgc.inline.hpp"
#endif // INCLUDE_ALL_GCS
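
// java.lang.ref.Reference instances carry three fields -- referent, next and
// discovered -- that the collectors must not treat as ordinary strong oops.
// The specializations below give each collector a chance to *discover* the
// reference (handing it off to the ReferenceProcessor for later processing)
// instead of marking straight through the referent.
//
// Serial mark-sweep: follow the contents of a Reference, discovering it if
// the referent is not yet marked.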
template <class T>
void specialized_oop_follow_contents(InstanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if (TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->discover_reference(obj, ref->reference_type())) {
      // reference was discovered, referent will be traversed later
      ref->InstanceKlass::oop_follow_contents(obj);
      debug_only(
        if (TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, (void *)obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if (TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, (void *)obj);
        }
      )
      MarkSweep::mark_and_push(referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    // Treat discovered as normal oop, if ref is not "active",
    // i.e. if next is non-NULL.
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if (TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      MarkSweep::mark_and_push(discovered_addr);
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    oop next = oopDesc::load_decode_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }
  // treat next as normal oop.  next is a link in the reference queue.
  debug_only(
    if (TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  MarkSweep::mark_and_push(next_addr);
  ref->InstanceKlass::oop_follow_contents(obj);
}

void InstanceRefKlass::oop_follow_contents(oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, obj);
  }
}
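
// Parallel compact (full GC) variant: same discovery protocol as the serial
// path above, but marking goes through the per-thread ParCompactionManager.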
#if INCLUDE_ALL_GCS
template <class T>
void specialized_oop_follow_contents(InstanceRefKlass* ref,
                                     ParCompactionManager* cm,
                                     oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if (TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->InstanceKlass::oop_follow_contents(cm, obj);
      debug_only(
        if (TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, (void *)obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if (TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, (void *)obj);
        }
      )
      PSParallelCompact::mark_and_push(cm, referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    // Treat discovered as normal oop, if ref is not "active",
    // i.e. if next is non-NULL.
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if (TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      PSParallelCompact::mark_and_push(cm, discovered_addr);
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    T next = oopDesc::load_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }
  PSParallelCompact::mark_and_push(cm, next_addr);
  ref->InstanceKlass::oop_follow_contents(cm, obj);
}

void InstanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
                                           oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, cm, obj);
  }
}
#endif // INCLUDE_ALL_GCS
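
// Debug-only helper that prints the address and current value of the three
// special Reference fields; shared by oop_adjust_pointers and
// oop_update_pointers below.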
#ifdef ASSERT
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if (TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
    gclog_or_tty->print_cr("     referent_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, referent_addr,
         referent_addr ?
           (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
    gclog_or_tty->print_cr("     next_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, next_addr,
         next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
    gclog_or_tty->print_cr("     discovered_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, discovered_addr,
         discovered_addr ?
           (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
  }
}
#endif
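
// Compaction: adjust the three special fields like ordinary oops -- by this
// point reference discovery has already been done, so they can simply be
// re-pointed at the objects' new locations.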
template <class T> void specialized_oop_adjust_pointers(InstanceRefKlass *ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  MarkSweep::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  MarkSweep::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  MarkSweep::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("InstanceRefKlass::oop_adjust_pointers", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int InstanceRefKlass::oop_adjust_pointers(oop obj) {
  int size = size_helper();
  InstanceKlass::oop_adjust_pointers(obj);

  if (UseCompressedOops) {
    specialized_oop_adjust_pointers<narrowOop>(this, obj);
  } else {
    specialized_oop_adjust_pointers<oop>(this, obj);
  }
  return size;
}
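
// Body shared by all oop_oop_iterate variants.  Expanded once per oop width
// (narrowOop/oop) and once per closure type; "contains" filters which
// addresses the closure is applied to (trivially true for unbounded
// iteration, mr.contains for the MemRegion-bounded _m variants).  Note that
// the macro supplies the "return size" for the enclosing function.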
#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)        \
  T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);             \
  if (closure->apply_to_weak_ref_discovered_field()) {                          \
    closure->do_oop##nv_suffix(disc_addr);                                      \
  }                                                                             \
                                                                                \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);           \
  T heap_oop = oopDesc::load_heap_oop(referent_addr);                           \
  ReferenceProcessor* rp = closure->_ref_processor;                             \
  if (!oopDesc::is_null(heap_oop)) {                                            \
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);                 \
    if (!referent->is_gc_marked() && (rp != NULL) &&                            \
        rp->discover_reference(obj, reference_type())) {                        \
      return size;                                                              \
    } else if (contains(referent_addr)) {                                       \
      /* treat referent as normal oop */                                        \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr);                                \
    }                                                                           \
  }                                                                             \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                   \
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {               \
    T next_oop  = oopDesc::load_heap_oop(next_addr);                            \
    /* Treat discovered as normal oop, if ref is not "active" (next non-NULL) */\
    if (!oopDesc::is_null(next_oop) && contains(disc_addr)) {                   \
      /* i.e. ref is not "active" */                                            \
      debug_only(                                                               \
        if (TraceReferenceGC && PrintGCDetails) {                               \
          gclog_or_tty->print_cr("   Process discovered as normal "             \
                                 INTPTR_FORMAT, disc_addr);                     \
        }                                                                       \
      )                                                                         \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(disc_addr);                                    \
    }                                                                           \
  } else {                                                                      \
    /* In the case of older JDKs which do not use the discovered field for  */  \
    /* the pending list, an inactive ref (next != NULL) must always have a  */  \
    /* NULL discovered field. */                                                \
    debug_only(                                                                 \
      T next_oop = oopDesc::load_heap_oop(next_addr);                           \
      T disc_oop = oopDesc::load_heap_oop(disc_addr);                           \
      assert(oopDesc::is_null(next_oop) || oopDesc::is_null(disc_oop),          \
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL "\
                   "discovered field", (oopDesc*)obj));                         \
    )                                                                           \
  }                                                                             \
  /* treat next as normal oop */                                                \
  if (contains(next_addr)) {                                                    \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
    closure->do_oop##nv_suffix(next_addr);                                      \
  }                                                                             \
  return size;

template <class T> bool contains(T *t) { return true; }

// Macro to define InstanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures.  Macros calling macros above for each oop size.

#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                                \
int InstanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}

#if INCLUDE_ALL_GCS
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int InstanceRefKlass::                                                          \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {        \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
#endif // INCLUDE_ALL_GCS
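
// The _m variants iterate only over oops that lie inside the given MemRegion,
// using mr.contains as the filter.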

#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                                \
int InstanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
                               OopClosureType* closure,                         \
                               MemRegion mr) {                                  \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
  }                                                                             \
}

ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
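
// Parallel scavenge (young GC): push the contents of a Reference, attempting
// discovery only when the referent itself is scavengable.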
#if INCLUDE_ALL_GCS
template <class T>
void specialized_oop_push_contents(InstanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->InstanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // Treat discovered as normal oop, if ref is not "active",
  // i.e. if next is non-NULL.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if (TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      if (PSScavenge::should_scavenge(discovered_addr)) {
        pm->claim_or_forward_depth(discovered_addr);
      }
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    oop next = oopDesc::load_decode_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }

  // Treat next as normal oop;  next is a link in the reference queue.
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_depth(next_addr);
  }
  ref->InstanceKlass::oop_push_contents(pm, obj);
}

void InstanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  if (UseCompressedOops) {
    specialized_oop_push_contents<narrowOop>(this, pm, obj);
  } else {
    specialized_oop_push_contents<oop>(this, pm, obj);
  }
}
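
// Parallel compact: update the three special fields to their post-compaction
// addresses, mirroring the serial oop_adjust_pointers above.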

template <class T>
void specialized_oop_update_pointers(InstanceRefKlass *ref,
                                    ParCompactionManager* cm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  PSParallelCompact::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  PSParallelCompact::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  PSParallelCompact::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("InstanceRefKlass::oop_update_ptrs", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int InstanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  InstanceKlass::oop_update_pointers(cm, obj);
  if (UseCompressedOops) {
    specialized_oop_update_pointers<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_update_pointers<oop>(this, cm, obj);
  }
  return size_helper();
}
#endif // INCLUDE_ALL_GCS
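
// Reference's klass starts out with a single oop-map covering all four oop
// fields; at bootstrap the map is narrowed so that only the queue field is
// treated as a normal strong oop.  With shared (CDS) spaces the map was
// already rewritten at dump time, so it is only verified here.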
void InstanceRefKlass::update_nonstatic_oop_maps(Klass* k) {
  // Clear the nonstatic oop-map entries corresponding to referent
  // and nextPending field.  They are treated specially by the
  // garbage collector.
  // The discovered field is used only by the garbage collector
  // and is also treated specially.
  InstanceKlass* ik = InstanceKlass::cast(k);

  // Check that we have the right class
  debug_only(static bool first_time = true);
  assert(k == SystemDictionary::Reference_klass() && first_time,
         "Invalid update of maps");
  debug_only(first_time = false);
  assert(ik->nonstatic_oop_map_count() == 1, "just checking");

  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();

  // Check that the current map is (2,4) - currently points at field with
  // offset 2 (words) and has 4 map entries.
  debug_only(int offset = java_lang_ref_Reference::referent_offset);
  debug_only(unsigned int count = ((java_lang_ref_Reference::discovered_offset -
    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);

  if (UseSharedSpaces) {
    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
           map->count() == 1, "just checking");
  } else {
    assert(map->offset() == offset && map->count() == count,
           "just checking");

    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
    map->set_offset(java_lang_ref_Reference::queue_offset);
    map->set_count(1);
  }
}

// Verification

void InstanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
  InstanceKlass::oop_verify_on(obj, st);
  // Verify referent field
  oop referent = java_lang_ref_Reference::referent(obj);

  // We should make this general to all heaps
  GenCollectedHeap* gch = NULL;
  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
    gch = GenCollectedHeap::heap();

  if (referent != NULL) {
    guarantee(referent->is_oop(), "referent field verify failed");
  }
  // Verify next field
  oop next = java_lang_ref_Reference::next(obj);
  if (next != NULL) {
    guarantee(next->is_oop(), "next field verify failed");
    guarantee(next->is_instanceRef(), "next field verify failed");
  }
}
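
// Support for the java.lang.ref.Reference pending-list lock, which the VM
// must hold while it appends discovered references to the pending list that
// the reference handler thread consumes.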
bool InstanceRefKlass::owns_pending_list_lock(JavaThread* thread) {
  if (java_lang_ref_Reference::pending_list_lock() == NULL) return false;
  Handle h_lock(thread, java_lang_ref_Reference::pending_list_lock());
  return ObjectSynchronizer::current_thread_holds_lock(thread, h_lock);
}

void InstanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument

  // Create a HandleMark in case we retry a GC multiple times.
  // Each time we attempt the GC, we allocate the handle below
  // to hold the pending list lock. We want to free this handle.
  HandleMark hm;

  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Locking should have succeeded");
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}

void InstanceRefKlass::release_and_notify_pending_list_lock(
  BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument

  // Create a HandleMark in case we retry a GC multiple times.
  // Each time we attempt the GC, we allocate the handle below
  // to hold the pending list lock. We want to free this handle.
  HandleMark hm;

  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Lock should be held");
  // Notify waiters on pending lists lock if there is any reference.
  if (java_lang_ref_Reference::pending_list() != NULL) {
    ObjectSynchronizer::notifyall(h_lock, THREAD);
  }
  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}
