src/share/vm/oops/instanceRefKlass.cpp

author:      drchase
date:        Thu, 22 May 2014 15:52:41 -0400
changeset:   6680:78bbf4d43a14
parent:      5784:190899198332
child:       6876:710a3c8b516e
child:       7091:a8ea2f110d87
permissions: -rw-r--r--

8037816: Fix for 8036122 breaks build with Xcode5/clang
8043029: Change 8037816 breaks HS build with older GCC versions which don't support diagnostic pragmas
8043164: Format warning in traceStream.hpp
Summary: Backport of main fix + two corrections, enables clang compilation, turns on format attributes, corrects/mutes warnings
Reviewed-by: kvn, coleenp, iveresov, twisti
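For context on the summary above: turning on format attributes and muting format warnings is done with compiler-specific helper macros, such as the PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC used near the top of this file. The sketch below only illustrates how macros of that kind are commonly built from GCC/Clang diagnostic pragmas and printf-format attributes; the EXAMPLE_* names are hypothetical, and the actual HotSpot definitions (in the compiler-specific utility headers) differ in detail.

// Illustrative sketch only -- not the HotSpot definitions.
#if defined(__GNUC__) || defined(__clang__)
  // Silence -Wformat diagnostics for the code that follows (file scope here).
  #define EXAMPLE_FORMAT_MUTE_WARNINGS \
    _Pragma("GCC diagnostic ignored \"-Wformat\"")
  // Ask the compiler to check printf-style calls against their format string.
  #define EXAMPLE_ATTRIBUTE_PRINTF(fmt, args) \
    __attribute__((format(printf, fmt, args)))
#else
  #define EXAMPLE_FORMAT_MUTE_WARNINGS
  #define EXAMPLE_ATTRIBUTE_PRINTF(fmt, args)
#endif

// Example use: declare a print helper so gcc/clang verify its callers' format strings.
void example_print_cr(const char* format, ...) EXAMPLE_ATTRIBUTE_PRINTF(1, 2);

A file-scope mute, as used below, is presumably the pragmatic choice here because the existing print_cr calls pass oop and pointer values to INTPTR_FORMAT, which trips -Wformat once format checking is attributed onto the logging functions.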

/*
 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "classfile/systemDictionary.hpp"
#include "gc_implementation/shared/markSweep.inline.hpp"
#include "gc_interface/collectedHeap.hpp"
#include "gc_interface/collectedHeap.inline.hpp"
#include "memory/genCollectedHeap.hpp"
#include "memory/genOopClosures.inline.hpp"
#include "oops/instanceRefKlass.hpp"
#include "oops/oop.inline.hpp"
#include "utilities/preserveException.hpp"
#include "utilities/macros.hpp"
#if INCLUDE_ALL_GCS
#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
#include "gc_implementation/g1/g1OopClosures.inline.hpp"
#include "gc_implementation/g1/g1RemSet.inline.hpp"
#include "gc_implementation/g1/heapRegionSeq.inline.hpp"
#include "gc_implementation/parNew/parOopClosures.inline.hpp"
#include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
#include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
#include "oops/oop.pcgc.inline.hpp"
#endif // INCLUDE_ALL_GCS

PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
template <class T>
void specialized_oop_follow_contents(InstanceRefKlass* ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (!referent->is_gc_marked() &&
        MarkSweep::ref_processor()->discover_reference(obj, ref->reference_type())) {
      // reference was discovered, referent will be traversed later
      ref->InstanceKlass::oop_follow_contents(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, (void *)obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, (void *)obj);
        }
      )
      MarkSweep::mark_and_push(referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    // Treat discovered as normal oop, if ref is not "active",
    // i.e. if next is non-NULL.
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      MarkSweep::mark_and_push(discovered_addr);
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    oop next = oopDesc::load_decode_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }
  // treat next as normal oop.  next is a link in the reference queue.
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("   Process next as normal " INTPTR_FORMAT, next_addr);
    }
  )
  MarkSweep::mark_and_push(next_addr);
  ref->InstanceKlass::oop_follow_contents(obj);
}

void InstanceRefKlass::oop_follow_contents(oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, obj);
  }
}
#if INCLUDE_ALL_GCS
template <class T>
void specialized_oop_follow_contents(InstanceRefKlass* ref,
                                     ParCompactionManager* cm,
                                     oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  T heap_oop = oopDesc::load_heap_oop(referent_addr);
  debug_only(
    if(TraceReferenceGC && PrintGCDetails) {
      gclog_or_tty->print_cr("InstanceRefKlass::oop_follow_contents " INTPTR_FORMAT, (void *)obj);
    }
  )
  if (!oopDesc::is_null(heap_oop)) {
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);
    if (PSParallelCompact::mark_bitmap()->is_unmarked(referent) &&
        PSParallelCompact::ref_processor()->
          discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent will be traversed later
      ref->InstanceKlass::oop_follow_contents(cm, obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL enqueued " INTPTR_FORMAT, (void *)obj);
        }
      )
      return;
    } else {
      // treat referent as normal oop
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("       Non NULL normal " INTPTR_FORMAT, (void *)obj);
        }
      )
      PSParallelCompact::mark_and_push(cm, referent_addr);
    }
  }
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    // Treat discovered as normal oop, if ref is not "active",
    // i.e. if next is non-NULL.
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      PSParallelCompact::mark_and_push(cm, discovered_addr);
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    T next = oopDesc::load_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }
  PSParallelCompact::mark_and_push(cm, next_addr);
  ref->InstanceKlass::oop_follow_contents(cm, obj);
}

void InstanceRefKlass::oop_follow_contents(ParCompactionManager* cm,
                                           oop obj) {
  if (UseCompressedOops) {
    specialized_oop_follow_contents<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_follow_contents<oop>(this, cm, obj);
  }
}
#endif // INCLUDE_ALL_GCS
#ifdef ASSERT
template <class T> void trace_reference_gc(const char *s, oop obj,
                                           T* referent_addr,
                                           T* next_addr,
                                           T* discovered_addr) {
  if(TraceReferenceGC && PrintGCDetails) {
    gclog_or_tty->print_cr("%s obj " INTPTR_FORMAT, s, (address)obj);
    gclog_or_tty->print_cr("     referent_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, referent_addr,
         referent_addr ?
           (address)oopDesc::load_decode_heap_oop(referent_addr) : NULL);
    gclog_or_tty->print_cr("     next_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, next_addr,
         next_addr ? (address)oopDesc::load_decode_heap_oop(next_addr) : NULL);
    gclog_or_tty->print_cr("     discovered_addr/* " INTPTR_FORMAT " / "
         INTPTR_FORMAT, discovered_addr,
         discovered_addr ?
           (address)oopDesc::load_decode_heap_oop(discovered_addr) : NULL);
  }
}
#endif
template <class T> void specialized_oop_adjust_pointers(InstanceRefKlass *ref, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  MarkSweep::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  MarkSweep::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  MarkSweep::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("InstanceRefKlass::oop_adjust_pointers", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int InstanceRefKlass::oop_adjust_pointers(oop obj) {
  int size = size_helper();
  InstanceKlass::oop_adjust_pointers(obj);

  if (UseCompressedOops) {
    specialized_oop_adjust_pointers<narrowOop>(this, obj);
  } else {
    specialized_oop_adjust_pointers<oop>(this, obj);
  }
  return size;
}
#define InstanceRefKlass_SPECIALIZED_OOP_ITERATE(T, nv_suffix, contains)        \
  T* disc_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);             \
  if (closure->apply_to_weak_ref_discovered_field()) {                          \
    closure->do_oop##nv_suffix(disc_addr);                                      \
  }                                                                             \
                                                                                \
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);           \
  T heap_oop = oopDesc::load_heap_oop(referent_addr);                           \
  ReferenceProcessor* rp = closure->_ref_processor;                             \
  if (!oopDesc::is_null(heap_oop)) {                                            \
    oop referent = oopDesc::decode_heap_oop_not_null(heap_oop);                 \
    if (!referent->is_gc_marked() && (rp != NULL) &&                            \
        rp->discover_reference(obj, reference_type())) {                        \
      return size;                                                              \
    } else if (contains(referent_addr)) {                                       \
      /* treat referent as normal oop */                                        \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(referent_addr);                                \
    }                                                                           \
  }                                                                             \
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);                   \
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {               \
    T next_oop  = oopDesc::load_heap_oop(next_addr);                            \
    /* Treat discovered as normal oop, if ref is not "active" (next non-NULL) */\
    if (!oopDesc::is_null(next_oop) && contains(disc_addr)) {                   \
        /* i.e. ref is not "active" */                                          \
      debug_only(                                                               \
        if(TraceReferenceGC && PrintGCDetails) {                                \
          gclog_or_tty->print_cr("   Process discovered as normal "             \
                                 INTPTR_FORMAT, disc_addr);                     \
        }                                                                       \
      )                                                                         \
      SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk);\
      closure->do_oop##nv_suffix(disc_addr);                                    \
    }                                                                           \
  } else {                                                                      \
    /* In the case of older JDKs which do not use the discovered field for  */  \
    /* the pending list, an inactive ref (next != NULL) must always have a  */  \
    /* NULL discovered field. */                                                \
    debug_only(                                                                 \
      T next_oop = oopDesc::load_heap_oop(next_addr);                           \
      T disc_oop = oopDesc::load_heap_oop(disc_addr);                           \
      assert(oopDesc::is_null(next_oop) || oopDesc::is_null(disc_oop),          \
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL "\
                   "discovered field", (oopDesc*)obj));                         \
    )                                                                           \
  }                                                                             \
  /* treat next as normal oop */                                                \
  if (contains(next_addr)) {                                                    \
    SpecializationStats::record_do_oop_call##nv_suffix(SpecializationStats::irk); \
    closure->do_oop##nv_suffix(next_addr);                                      \
  }                                                                             \
  return size;                                                                  \
template <class T> bool contains(T *t) { return true; }

// Macro to define InstanceRefKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures.  Macros calling macros above for each oop size.

#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)        \
                                                                                \
int InstanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                  \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);           \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
#if INCLUDE_ALL_GCS
#define InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                \
int InstanceRefKlass::                                                          \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {        \
  /* Get size before changing pointers */                                       \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure); \
                                                                                \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, contains);   \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, contains);         \
  }                                                                             \
}
#endif // INCLUDE_ALL_GCS
#define InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)      \
                                                                                \
int InstanceRefKlass::                                                          \
oop_oop_iterate##nv_suffix##_m(oop obj,                                         \
                               OopClosureType* closure,                         \
                               MemRegion mr) {                                  \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);\
                                                                                \
  int size = InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);   \
  if (UseCompressedOops) {                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr.contains); \
  } else {                                                                      \
    InstanceRefKlass_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr.contains);      \
  }                                                                             \
}

ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceRefKlass_OOP_OOP_ITERATE_DEFN_m)
#if INCLUDE_ALL_GCS
template <class T>
void specialized_oop_push_contents(InstanceRefKlass *ref,
                                   PSPromotionManager* pm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  if (PSScavenge::should_scavenge(referent_addr)) {
    ReferenceProcessor* rp = PSScavenge::reference_processor();
    if (rp->discover_reference(obj, ref->reference_type())) {
      // reference already enqueued, referent and next will be traversed later
      ref->InstanceKlass::oop_push_contents(pm, obj);
      return;
    } else {
      // treat referent as normal oop
      pm->claim_or_forward_depth(referent_addr);
    }
  }
  // Treat discovered as normal oop, if ref is not "active",
  // i.e. if next is non-NULL.
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  if (ReferenceProcessor::pending_list_uses_discovered_field()) {
    T  next_oop = oopDesc::load_heap_oop(next_addr);
    if (!oopDesc::is_null(next_oop)) { // i.e. ref is not "active"
      T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
      debug_only(
        if(TraceReferenceGC && PrintGCDetails) {
          gclog_or_tty->print_cr("   Process discovered as normal "
                                 INTPTR_FORMAT, discovered_addr);
        }
      )
      if (PSScavenge::should_scavenge(discovered_addr)) {
        pm->claim_or_forward_depth(discovered_addr);
      }
    }
  } else {
#ifdef ASSERT
    // In the case of older JDKs which do not use the discovered
    // field for the pending list, an inactive ref (next != NULL)
    // must always have a NULL discovered field.
    oop next = oopDesc::load_decode_heap_oop(next_addr);
    oop discovered = java_lang_ref_Reference::discovered(obj);
    assert(oopDesc::is_null(next) || oopDesc::is_null(discovered),
           err_msg("Found an inactive reference " PTR_FORMAT " with a non-NULL discovered field",
                   (oopDesc*)obj));
#endif
  }

  // Treat next as normal oop;  next is a link in the reference queue.
  if (PSScavenge::should_scavenge(next_addr)) {
    pm->claim_or_forward_depth(next_addr);
  }
  ref->InstanceKlass::oop_push_contents(pm, obj);
}

void InstanceRefKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  if (UseCompressedOops) {
    specialized_oop_push_contents<narrowOop>(this, pm, obj);
  } else {
    specialized_oop_push_contents<oop>(this, pm, obj);
  }
}
template <class T>
void specialized_oop_update_pointers(InstanceRefKlass *ref,
                                    ParCompactionManager* cm, oop obj) {
  T* referent_addr = (T*)java_lang_ref_Reference::referent_addr(obj);
  PSParallelCompact::adjust_pointer(referent_addr);
  T* next_addr = (T*)java_lang_ref_Reference::next_addr(obj);
  PSParallelCompact::adjust_pointer(next_addr);
  T* discovered_addr = (T*)java_lang_ref_Reference::discovered_addr(obj);
  PSParallelCompact::adjust_pointer(discovered_addr);
  debug_only(trace_reference_gc("InstanceRefKlass::oop_update_ptrs", obj,
                                referent_addr, next_addr, discovered_addr);)
}

int InstanceRefKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  InstanceKlass::oop_update_pointers(cm, obj);
  if (UseCompressedOops) {
    specialized_oop_update_pointers<narrowOop>(this, cm, obj);
  } else {
    specialized_oop_update_pointers<oop>(this, cm, obj);
  }
  return size_helper();
}
#endif // INCLUDE_ALL_GCS
void InstanceRefKlass::update_nonstatic_oop_maps(Klass* k) {
  // Clear the nonstatic oop-map entries corresponding to referent
  // and nextPending field.  They are treated specially by the
  // garbage collector.
  // The discovered field is used only by the garbage collector
  // and is also treated specially.
  InstanceKlass* ik = InstanceKlass::cast(k);

  // Check that we have the right class
  debug_only(static bool first_time = true);
  assert(k == SystemDictionary::Reference_klass() && first_time,
         "Invalid update of maps");
  debug_only(first_time = false);
  assert(ik->nonstatic_oop_map_count() == 1, "just checking");

  OopMapBlock* map = ik->start_of_nonstatic_oop_maps();

  // Check that the current map is (2,4) - currently points at field with
  // offset 2 (words) and has 4 map entries.
  debug_only(int offset = java_lang_ref_Reference::referent_offset);
  debug_only(unsigned int count = ((java_lang_ref_Reference::discovered_offset -
    java_lang_ref_Reference::referent_offset)/heapOopSize) + 1);

  if (UseSharedSpaces) {
    assert(map->offset() == java_lang_ref_Reference::queue_offset &&
           map->count() == 1, "just checking");
  } else {
    assert(map->offset() == offset && map->count() == count,
           "just checking");

    // Update map to (3,1) - point to offset of 3 (words) with 1 map entry.
    map->set_offset(java_lang_ref_Reference::queue_offset);
    map->set_count(1);
  }
}
// Verification

void InstanceRefKlass::oop_verify_on(oop obj, outputStream* st) {
  InstanceKlass::oop_verify_on(obj, st);
  // Verify referent field
  oop referent = java_lang_ref_Reference::referent(obj);

  // We should make this general to all heaps
  GenCollectedHeap* gch = NULL;
  if (Universe::heap()->kind() == CollectedHeap::GenCollectedHeap)
    gch = GenCollectedHeap::heap();

  if (referent != NULL) {
    guarantee(referent->is_oop(), "referent field heap failed");
  }
  // Verify next field
  oop next = java_lang_ref_Reference::next(obj);
  if (next != NULL) {
    guarantee(next->is_oop(), "next field verify failed");
    guarantee(next->is_instanceRef(), "next field verify failed");
  }
}
bool InstanceRefKlass::owns_pending_list_lock(JavaThread* thread) {
  if (java_lang_ref_Reference::pending_list_lock() == NULL) return false;
  Handle h_lock(thread, java_lang_ref_Reference::pending_list_lock());
  return ObjectSynchronizer::current_thread_holds_lock(thread, h_lock);
}

void InstanceRefKlass::acquire_pending_list_lock(BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument

  // Create a HandleMark in case we retry a GC multiple times.
  // Each time we attempt the GC, we allocate the handle below
  // to hold the pending list lock. We want to free this handle.
  HandleMark hm;

  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  ObjectSynchronizer::fast_enter(h_lock, pending_list_basic_lock, false, THREAD);
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Locking should have succeeded");
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}

void InstanceRefKlass::release_and_notify_pending_list_lock(
  BasicLock *pending_list_basic_lock) {
  // we may enter this with pending exception set
  PRESERVE_EXCEPTION_MARK;  // exceptions are never thrown, needed for TRAPS argument

  // Create a HandleMark in case we retry a GC multiple times.
  // Each time we attempt the GC, we allocate the handle below
  // to hold the pending list lock. We want to free this handle.
  HandleMark hm;

  Handle h_lock(THREAD, java_lang_ref_Reference::pending_list_lock());
  assert(ObjectSynchronizer::current_thread_holds_lock(
           JavaThread::current(), h_lock),
         "Lock should be held");
  // Notify waiters on pending lists lock if there is any reference.
  if (java_lang_ref_Reference::pending_list() != NULL) {
    ObjectSynchronizer::notifyall(h_lock, THREAD);
  }
  ObjectSynchronizer::fast_exit(h_lock(), pending_list_basic_lock, THREAD);
  if (HAS_PENDING_EXCEPTION) CLEAR_PENDING_EXCEPTION;
}
