src/share/vm/memory/referenceProcessor.cpp

author:      johnc
date:        Wed, 12 Oct 2011 10:25:51 -0700
changeset:   3188:d1bdeef3e3e2
parent:      3175:4dfb2df418f2
child:       3210:bf2d2b8b1726
permissions: -rw-r--r--

7098282: G1: assert(interval >= 0) failed: Sanity check, referencePolicy.cpp: 76
Summary: There is a race between one thread forwarding and copying the klass mirror for the SoftReference class (including its static master clock field) and another thread reading the master clock while attempting to discover a soft reference object. Maintain a shadow copy of the soft reference master clock and use the shadow during reference discovery and reference processing.
Reviewed-by: tonyp, brutisso, ysr
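
The essence of the fix: while a GC copies the SoftReference klass mirror, the static master clock field inside that mirror may be observed mid-copy, so the reference processor keeps a shadow jlong that is refreshed only at well-defined points (see init_statics(), enable_discovery() and process_discovered_references() below) and consulted everywhere else. The following is a minimal standalone sketch of the pattern, not the actual HotSpot code; the names g_mirror_clock, SoftRefClock and should_clear are illustrative stand-ins.

    #include <cstdint>

    typedef int64_t jlong;

    // Stand-in for the static master clock field in the j.l.r.SoftReference
    // klass mirror, which a concurrent GC worker may be in the middle of copying.
    static jlong g_mirror_clock = 0;

    struct SoftRefClock {
      static jlong _shadow;   // shadow copy owned by the reference processor

      // Refresh only when the mirror is known to be consistent (when
      // discovery is enabled and when processing starts). Re-reading also
      // picks up changes made via reflection or Unsafe between GCs.
      static void refresh() { _shadow = g_mirror_clock; }

      // All policy decisions during discovery/processing read the shadow,
      // never the possibly in-flight mirror.
      static jlong now() { return _shadow; }
    };

    jlong SoftRefClock::_shadow = 0;

    // Analogous to a soft-ref policy's should_clear_reference(): with the
    // shadow, interval >= 0 holds; reading a half-copied mirror could yield
    // a bogus clock and trip the assert(interval >= 0) in referencePolicy.cpp.
    static bool should_clear(jlong ref_timestamp) {
      jlong interval = SoftRefClock::now() - ref_timestamp;
      return interval > 0;
    }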

duke@435 1 /*
ysr@2651 2 * Copyright (c) 2001, 2011, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
stefank@2314 26 #include "classfile/javaClasses.hpp"
stefank@2314 27 #include "classfile/systemDictionary.hpp"
stefank@2314 28 #include "gc_interface/collectedHeap.hpp"
stefank@2314 29 #include "gc_interface/collectedHeap.inline.hpp"
stefank@2314 30 #include "memory/referencePolicy.hpp"
stefank@2314 31 #include "memory/referenceProcessor.hpp"
stefank@2314 32 #include "oops/oop.inline.hpp"
stefank@2314 33 #include "runtime/java.hpp"
stefank@2314 34 #include "runtime/jniHandles.hpp"
duke@435 35
ysr@888 36 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
ysr@888 37 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL;
ysr@3117 38 bool ReferenceProcessor::_pending_list_uses_discovered_field = false;
johnc@3188 39 jlong ReferenceProcessor::_soft_ref_timestamp_clock = 0;
ysr@888 40
duke@435 41 void referenceProcessor_init() {
duke@435 42 ReferenceProcessor::init_statics();
duke@435 43 }
duke@435 44
duke@435 45 void ReferenceProcessor::init_statics() {
johnc@3188 46 jlong now = os::javaTimeMillis();
johnc@3188 47
johnc@3188 48 // Initialize the soft ref timestamp clock.
johnc@3188 49 _soft_ref_timestamp_clock = now;
johnc@3188 50 // Also update the soft ref clock in j.l.r.SoftReference
johnc@3188 51 java_lang_ref_SoftReference::set_clock(_soft_ref_timestamp_clock);
duke@435 52
ysr@888 53 _always_clear_soft_ref_policy = new AlwaysClearPolicy();
ysr@888 54 _default_soft_ref_policy = new COMPILER2_PRESENT(LRUMaxHeapPolicy())
ysr@888 55 NOT_COMPILER2(LRUCurrentHeapPolicy());
ysr@888 56 if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) {
ysr@888 57 vm_exit_during_initialization("Could not allocate reference policy object");
ysr@888 58 }
duke@435 59 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
duke@435 60 RefDiscoveryPolicy == ReferentBasedDiscovery,
duke@435 61 "Unrecongnized RefDiscoveryPolicy");
ysr@3117 62 _pending_list_uses_discovered_field = JDK_Version::current().pending_list_uses_discovered_field();
duke@435 63 }
duke@435 64
johnc@3188 65 void ReferenceProcessor::enable_discovery(bool verify_disabled, bool check_no_refs) {
johnc@3188 66 #ifdef ASSERT
johnc@3188 67 // Verify that we're not currently discovering refs
johnc@3188 68 assert(!verify_disabled || !_discovering_refs, "nested call?");
johnc@3188 69
johnc@3188 70 if (check_no_refs) {
johnc@3188 71 // Verify that the discovered lists are empty
johnc@3188 72 verify_no_references_recorded();
johnc@3188 73 }
johnc@3188 74 #endif // ASSERT
johnc@3188 75
johnc@3188 76 // Someone could have modified the value of the static
johnc@3188 77 // field in the j.l.r.SoftReference class that holds the
johnc@3188 78 // soft reference timestamp clock using reflection or
johnc@3188 79 // Unsafe between GCs. Unconditionally update the static
johnc@3188 80 // field in ReferenceProcessor here so that we use the new
johnc@3188 81 // value during reference discovery.
johnc@3188 82
johnc@3188 83 _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
johnc@3188 84 _discovering_refs = true;
johnc@3188 85 }
johnc@3188 86
duke@435 87 ReferenceProcessor::ReferenceProcessor(MemRegion span,
ysr@2651 88 bool mt_processing,
ysr@2651 89 int mt_processing_degree,
ysr@2651 90 bool mt_discovery,
ysr@2651 91 int mt_discovery_degree,
coleenp@548 92 bool atomic_discovery,
ysr@2651 93 BoolObjectClosure* is_alive_non_header,
ysr@777 94 bool discovered_list_needs_barrier) :
duke@435 95 _discovering_refs(false),
duke@435 96 _enqueuing_is_done(false),
ysr@2651 97 _is_alive_non_header(is_alive_non_header),
ysr@777 98 _discovered_list_needs_barrier(discovered_list_needs_barrier),
ysr@777 99 _bs(NULL),
duke@435 100 _processing_is_mt(mt_processing),
duke@435 101 _next_id(0)
duke@435 102 {
duke@435 103 _span = span;
duke@435 104 _discovery_is_atomic = atomic_discovery;
duke@435 105 _discovery_is_mt = mt_discovery;
ysr@2651 106 _num_q = MAX2(1, mt_processing_degree);
ysr@2651 107 _max_num_q = MAX2(_num_q, mt_discovery_degree);
johnc@3175 108 _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList,
johnc@3175 109 _max_num_q * number_of_subclasses_of_ref());
duke@435 110 if (_discoveredSoftRefs == NULL) {
duke@435 111 vm_exit_during_initialization("Could not allocate RefProc Array");
duke@435 112 }
jmasa@2188 113 _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q];
jmasa@2188 114 _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_q];
jmasa@2188 115 _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
stefank@3115 116 // Initialize all entries to NULL
johnc@3175 117 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
stefank@3115 118 _discoveredSoftRefs[i].set_head(NULL);
duke@435 119 _discoveredSoftRefs[i].set_length(0);
duke@435 120 }
ysr@3117 121 // If we do barriers, cache a copy of the barrier set.
ysr@777 122 if (discovered_list_needs_barrier) {
ysr@777 123 _bs = Universe::heap()->barrier_set();
ysr@777 124 }
ysr@2651 125 setup_policy(false /* default soft ref policy */);
duke@435 126 }
duke@435 127
duke@435 128 #ifndef PRODUCT
duke@435 129 void ReferenceProcessor::verify_no_references_recorded() {
duke@435 130 guarantee(!_discovering_refs, "Discovering refs?");
johnc@3175 131 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
johnc@3175 132 guarantee(_discoveredSoftRefs[i].is_empty(),
duke@435 133 "Found non-empty discovered list");
duke@435 134 }
duke@435 135 }
duke@435 136 #endif
duke@435 137
duke@435 138 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
johnc@3175 139 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
coleenp@548 140 if (UseCompressedOops) {
coleenp@548 141 f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
coleenp@548 142 } else {
coleenp@548 143 f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
coleenp@548 144 }
duke@435 145 }
duke@435 146 }
duke@435 147
coleenp@548 148 void ReferenceProcessor::update_soft_ref_master_clock() {
duke@435 149 // Update (advance) the soft ref master clock field. This must be done
duke@435 150 // after processing the soft ref list.
duke@435 151 jlong now = os::javaTimeMillis();
johnc@3188 152 jlong soft_ref_clock = java_lang_ref_SoftReference::clock();
johnc@3188 153 assert(soft_ref_clock == _soft_ref_timestamp_clock, "soft ref clocks out of sync");
johnc@3188 154
duke@435 155 NOT_PRODUCT(
johnc@3188 156 if (now < _soft_ref_timestamp_clock) {
johnc@3188 157 warning("time warp: "INT64_FORMAT" to "INT64_FORMAT,
johnc@3188 158 _soft_ref_timestamp_clock, now);
duke@435 159 }
duke@435 160 )
duke@435 161 // In product mode, protect ourselves from system time being adjusted
duke@435 162 // externally and going backward; see note in the implementation of
duke@435 163 // GenCollectedHeap::time_since_last_gc() for the right way to fix
duke@435 164 // this uniformly throughout the VM; see bug-id 4741166. XXX
johnc@3188 165 if (now > _soft_ref_timestamp_clock) {
johnc@3188 166 _soft_ref_timestamp_clock = now;
duke@435 167 java_lang_ref_SoftReference::set_clock(now);
duke@435 168 }
duke@435 169 // Else leave clock stalled at its old value until time progresses
duke@435 170 // past clock value.
duke@435 171 }
duke@435 172
coleenp@548 173 void ReferenceProcessor::process_discovered_references(
duke@435 174 BoolObjectClosure* is_alive,
duke@435 175 OopClosure* keep_alive,
duke@435 176 VoidClosure* complete_gc,
duke@435 177 AbstractRefProcTaskExecutor* task_executor) {
duke@435 178 NOT_PRODUCT(verify_ok_to_handle_reflists());
duke@435 179
duke@435 180 assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
duke@435 181 // Stop treating discovered references specially.
duke@435 182 disable_discovery();
duke@435 183
johnc@3188 184 // If discovery was concurrent, someone could have modified
johnc@3188 185 // the value of the static field in the j.l.r.SoftReference
johnc@3188 186 // class that holds the soft reference timestamp clock using
johnc@3188 187 // reflection or Unsafe between when discovery was enabled and
johnc@3188 188 // now. Unconditionally update the static field in ReferenceProcessor
johnc@3188 189 // here so that we use the new value during processing of the
johnc@3188 190 // discovered soft refs.
johnc@3188 191
johnc@3188 192 _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
johnc@3188 193
duke@435 194 bool trace_time = PrintGCDetails && PrintReferenceGC;
duke@435 195 // Soft references
duke@435 196 {
duke@435 197 TraceTime tt("SoftReference", trace_time, false, gclog_or_tty);
ysr@888 198 process_discovered_reflist(_discoveredSoftRefs, _current_soft_ref_policy, true,
duke@435 199 is_alive, keep_alive, complete_gc, task_executor);
duke@435 200 }
duke@435 201
duke@435 202 update_soft_ref_master_clock();
duke@435 203
duke@435 204 // Weak references
duke@435 205 {
duke@435 206 TraceTime tt("WeakReference", trace_time, false, gclog_or_tty);
duke@435 207 process_discovered_reflist(_discoveredWeakRefs, NULL, true,
duke@435 208 is_alive, keep_alive, complete_gc, task_executor);
duke@435 209 }
duke@435 210
duke@435 211 // Final references
duke@435 212 {
duke@435 213 TraceTime tt("FinalReference", trace_time, false, gclog_or_tty);
duke@435 214 process_discovered_reflist(_discoveredFinalRefs, NULL, false,
duke@435 215 is_alive, keep_alive, complete_gc, task_executor);
duke@435 216 }
duke@435 217
duke@435 218 // Phantom references
duke@435 219 {
duke@435 220 TraceTime tt("PhantomReference", trace_time, false, gclog_or_tty);
duke@435 221 process_discovered_reflist(_discoveredPhantomRefs, NULL, false,
duke@435 222 is_alive, keep_alive, complete_gc, task_executor);
duke@435 223 }
duke@435 224
duke@435 225 // Weak global JNI references. It would make more sense (semantically) to
duke@435 226 // traverse these simultaneously with the regular weak references above, but
duke@435 227 // that is not what the JDK 1.2 specification requires. See #4126360. Native code can
duke@435 228 // thus use JNI weak references to circumvent the phantom references and
duke@435 229 // resurrect a "post-mortem" object.
duke@435 230 {
duke@435 231 TraceTime tt("JNI Weak Reference", trace_time, false, gclog_or_tty);
duke@435 232 if (task_executor != NULL) {
duke@435 233 task_executor->set_single_threaded_mode();
duke@435 234 }
duke@435 235 process_phaseJNI(is_alive, keep_alive, complete_gc);
duke@435 236 }
duke@435 237 }
duke@435 238
duke@435 239 #ifndef PRODUCT
duke@435 240 // Calculate the number of jni handles.
coleenp@548 241 uint ReferenceProcessor::count_jni_refs() {
duke@435 242 class AlwaysAliveClosure: public BoolObjectClosure {
duke@435 243 public:
coleenp@548 244 virtual bool do_object_b(oop obj) { return true; }
coleenp@548 245 virtual void do_object(oop obj) { assert(false, "Don't call"); }
duke@435 246 };
duke@435 247
duke@435 248 class CountHandleClosure: public OopClosure {
duke@435 249 private:
duke@435 250 int _count;
duke@435 251 public:
duke@435 252 CountHandleClosure(): _count(0) {}
coleenp@548 253 void do_oop(oop* unused) { _count++; }
coleenp@548 254 void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
duke@435 255 int count() { return _count; }
duke@435 256 };
duke@435 257 CountHandleClosure global_handle_count;
duke@435 258 AlwaysAliveClosure always_alive;
duke@435 259 JNIHandles::weak_oops_do(&always_alive, &global_handle_count);
duke@435 260 return global_handle_count.count();
duke@435 261 }
duke@435 262 #endif
duke@435 263
duke@435 264 void ReferenceProcessor::process_phaseJNI(BoolObjectClosure* is_alive,
duke@435 265 OopClosure* keep_alive,
duke@435 266 VoidClosure* complete_gc) {
duke@435 267 #ifndef PRODUCT
duke@435 268 if (PrintGCDetails && PrintReferenceGC) {
duke@435 269 unsigned int count = count_jni_refs();
duke@435 270 gclog_or_tty->print(", %u refs", count);
duke@435 271 }
duke@435 272 #endif
duke@435 273 JNIHandles::weak_oops_do(is_alive, keep_alive);
duke@435 274 complete_gc->do_void();
duke@435 275 }
duke@435 276
coleenp@548 277
coleenp@548 278 template <class T>
phh@1558 279 bool enqueue_discovered_ref_helper(ReferenceProcessor* ref,
phh@1558 280 AbstractRefProcTaskExecutor* task_executor) {
coleenp@548 281
duke@435 282 // Remember old value of pending references list
coleenp@548 283 T* pending_list_addr = (T*)java_lang_ref_Reference::pending_list_addr();
coleenp@548 284 T old_pending_list_value = *pending_list_addr;
duke@435 285
duke@435 286 // Enqueue references that are not made active again, and
duke@435 287 // clear the decks for the next collection (cycle).
coleenp@548 288 ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor);
duke@435 289 // Do the oop-check on pending_list_addr missed in
duke@435 290 // enqueue_discovered_reflist. We should probably
duke@435 291 // do a raw oop_check so that future such idempotent
duke@435 292 // oop_stores relying on the oop-check side-effect
duke@435 293 // may be elided automatically and safely without
duke@435 294 // affecting correctness.
coleenp@548 295 oop_store(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
duke@435 296
duke@435 297 // Stop treating discovered references specially.
coleenp@548 298 ref->disable_discovery();
duke@435 299
duke@435 300 // Return true if new pending references were added
duke@435 301 return old_pending_list_value != *pending_list_addr;
duke@435 302 }
duke@435 303
coleenp@548 304 bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
coleenp@548 305 NOT_PRODUCT(verify_ok_to_handle_reflists());
coleenp@548 306 if (UseCompressedOops) {
coleenp@548 307 return enqueue_discovered_ref_helper<narrowOop>(this, task_executor);
coleenp@548 308 } else {
coleenp@548 309 return enqueue_discovered_ref_helper<oop>(this, task_executor);
coleenp@548 310 }
coleenp@548 311 }
coleenp@548 312
duke@435 313 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
coleenp@548 314 HeapWord* pending_list_addr) {
duke@435 315 // Given a list of refs linked through the "discovered" field
ysr@3117 316 // (java.lang.ref.Reference.discovered), self-loop their "next" field
ysr@3117 317 // thus distinguishing them from active References, then
ysr@3117 318 // prepend them to the pending list.
ysr@3117 319 // BKWRD COMPATIBILITY NOTE: For older JDKs (prior to the fix for 4956777),
ysr@3117 320 // the "next" field is used to chain the pending list, not the discovered
ysr@3117 321 // field.
ysr@3117 322
duke@435 323 if (TraceReferenceGC && PrintGCDetails) {
duke@435 324 gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
duke@435 325 INTPTR_FORMAT, (address)refs_list.head());
duke@435 326 }
stefank@3115 327
stefank@3115 328 oop obj = NULL;
ysr@3117 329 oop next_d = refs_list.head();
ysr@3117 330 if (pending_list_uses_discovered_field()) { // New behaviour
ysr@3117 331 // Walk down the list, self-looping the next field
ysr@3117 332 // so that the References are not considered active.
ysr@3117 333 while (obj != next_d) {
ysr@3117 334 obj = next_d;
ysr@3117 335 assert(obj->is_instanceRef(), "should be reference object");
ysr@3117 336 next_d = java_lang_ref_Reference::discovered(obj);
ysr@3117 337 if (TraceReferenceGC && PrintGCDetails) {
ysr@3117 338 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
ysr@3117 339 obj, next_d);
ysr@3117 340 }
ysr@3117 341 assert(java_lang_ref_Reference::next(obj) == NULL,
ysr@3117 342 "Reference not active; should not be discovered");
ysr@3117 343 // Self-loop next, so as to make Ref not active.
ysr@3117 344 java_lang_ref_Reference::set_next(obj, obj);
ysr@3117 345 if (next_d == obj) { // obj is last
ysr@3117 346 // Swap refs_list into pending_list_addr and
ysr@3117 347 // set obj's discovered to what we read from pending_list_addr.
ysr@3117 348 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
ysr@3117 349 // Need oop_check on pending_list_addr above;
ysr@3117 350 // see special oop-check code at the end of
ysr@3117 351 // enqueue_discovered_reflists() further below.
ysr@3117 352 java_lang_ref_Reference::set_discovered(obj, old); // old may be NULL
ysr@3117 353 }
duke@435 354 }
ysr@3117 355 } else { // Old behaviour
ysr@3117 356 // Walk down the list, copying the discovered field into
ysr@3117 357 // the next field and clearing the discovered field.
ysr@3117 358 while (obj != next_d) {
ysr@3117 359 obj = next_d;
ysr@3117 360 assert(obj->is_instanceRef(), "should be reference object");
ysr@3117 361 next_d = java_lang_ref_Reference::discovered(obj);
ysr@3117 362 if (TraceReferenceGC && PrintGCDetails) {
ysr@3117 363 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
ysr@3117 364 obj, next_d);
ysr@3117 365 }
ysr@3117 366 assert(java_lang_ref_Reference::next(obj) == NULL,
ysr@3117 367 "The reference should not be enqueued");
ysr@3117 368 if (next_d == obj) { // obj is last
ysr@3117 369 // Swap refs_list into pending_list_addr and
ysr@3117 370 // set obj's next to what we read from pending_list_addr.
ysr@3117 371 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
ysr@3117 372 // Need oop_check on pending_list_addr above;
ysr@3117 373 // see special oop-check code at the end of
ysr@3117 374 // enqueue_discovered_reflists() further below.
ysr@3117 375 if (old == NULL) {
ysr@3117 376 // obj should be made to point to itself, since
ysr@3117 377 // pending list was empty.
ysr@3117 378 java_lang_ref_Reference::set_next(obj, obj);
ysr@3117 379 } else {
ysr@3117 380 java_lang_ref_Reference::set_next(obj, old);
ysr@3117 381 }
duke@435 382 } else {
ysr@3117 383 java_lang_ref_Reference::set_next(obj, next_d);
duke@435 384 }
ysr@3117 385 java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
duke@435 386 }
duke@435 387 }
duke@435 388 }
duke@435 389
duke@435 390 // Parallel enqueue task
duke@435 391 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
duke@435 392 public:
duke@435 393 RefProcEnqueueTask(ReferenceProcessor& ref_processor,
duke@435 394 DiscoveredList discovered_refs[],
coleenp@548 395 HeapWord* pending_list_addr,
duke@435 396 int n_queues)
duke@435 397 : EnqueueTask(ref_processor, discovered_refs,
stefank@3115 398 pending_list_addr, n_queues)
duke@435 399 { }
duke@435 400
coleenp@548 401 virtual void work(unsigned int work_id) {
ysr@2651 402 assert(work_id < (unsigned int)_ref_processor.max_num_q(), "Index out-of-bounds");
duke@435 403 // Simplest first cut: static partitioning.
duke@435 404 int index = work_id;
jmasa@2188 405 // The increment on "index" must correspond to the maximum number of queues
jmasa@2188 406 // (n_queues) with which that ReferenceProcessor was created. That
jmasa@2188 407 // is because of the "clever" way the discovered references lists were
ysr@2651 408 // allocated and are indexed into.
ysr@2651 409 assert(_n_queues == (int) _ref_processor.max_num_q(), "Different number not expected");
jmasa@2188 410 for (int j = 0;
johnc@3175 411 j < ReferenceProcessor::number_of_subclasses_of_ref();
jmasa@2188 412 j++, index += _n_queues) {
duke@435 413 _ref_processor.enqueue_discovered_reflist(
duke@435 414 _refs_lists[index], _pending_list_addr);
stefank@3115 415 _refs_lists[index].set_head(NULL);
duke@435 416 _refs_lists[index].set_length(0);
duke@435 417 }
duke@435 418 }
duke@435 419 };
duke@435 420
duke@435 421 // Enqueue references that are not made active again
coleenp@548 422 void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
duke@435 423 AbstractRefProcTaskExecutor* task_executor) {
duke@435 424 if (_processing_is_mt && task_executor != NULL) {
duke@435 425 // Parallel code
duke@435 426 RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
stefank@3115 427 pending_list_addr, _max_num_q);
duke@435 428 task_executor->execute(tsk);
duke@435 429 } else {
duke@435 430 // Serial code: call the parent class's implementation
johnc@3175 431 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
duke@435 432 enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
stefank@3115 433 _discoveredSoftRefs[i].set_head(NULL);
duke@435 434 _discoveredSoftRefs[i].set_length(0);
duke@435 435 }
duke@435 436 }
duke@435 437 }
duke@435 438
johnc@3175 439 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
duke@435 440 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
coleenp@548 441 oop discovered = java_lang_ref_Reference::discovered(_ref);
coleenp@548 442 assert(_discovered_addr && discovered->is_oop_or_null(),
duke@435 443 "discovered field is bad");
coleenp@548 444 _next = discovered;
duke@435 445 _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
coleenp@548 446 _referent = java_lang_ref_Reference::referent(_ref);
duke@435 447 assert(Universe::heap()->is_in_reserved_or_null(_referent),
duke@435 448 "Wrong oop found in java.lang.Reference object");
duke@435 449 assert(allow_null_referent ?
duke@435 450 _referent->is_oop_or_null()
duke@435 451 : _referent->is_oop(),
duke@435 452 "bad referent");
duke@435 453 }
duke@435 454
johnc@3175 455 void DiscoveredListIterator::remove() {
duke@435 456 assert(_ref->is_oop(), "Dropping a bad reference");
coleenp@548 457 oop_store_raw(_discovered_addr, NULL);
stefank@3115 458
coleenp@548 459 // First _prev_next ref actually points into DiscoveredList (gross).
stefank@3115 460 oop new_next;
stefank@3115 461 if (_next == _ref) {
stefank@3115 462 // At the end of the list, we should make _prev point to itself.
stefank@3115 463 // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
stefank@3115 464 // and _prev will be NULL.
stefank@3115 465 new_next = _prev;
stefank@3115 466 } else {
stefank@3115 467 new_next = _next;
stefank@3115 468 }
stefank@3115 469
coleenp@548 470 if (UseCompressedOops) {
coleenp@548 471 // Remove Reference object from list.
stefank@3115 472 oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
coleenp@548 473 } else {
coleenp@548 474 // Remove Reference object from list.
stefank@3115 475 oopDesc::store_heap_oop((oop*)_prev_next, new_next);
coleenp@548 476 }
duke@435 477 NOT_PRODUCT(_removed++);
ysr@887 478 _refs_list.dec_length(1);
duke@435 479 }
duke@435 480
johnc@3175 481 // Make the Reference object active again.
johnc@3175 482 void DiscoveredListIterator::make_active() {
johnc@3175 483 // For G1 we don't want to use set_next - it
johnc@3175 484 // will dirty the card for the next field of
johnc@3175 485 // the reference object and will fail
johnc@3175 486 // CT verification.
johnc@3175 487 if (UseG1GC) {
johnc@3175 488 BarrierSet* bs = oopDesc::bs();
johnc@3175 489 HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
johnc@3175 490
johnc@3175 491 if (UseCompressedOops) {
johnc@3175 492 bs->write_ref_field_pre((narrowOop*)next_addr, NULL);
johnc@3175 493 } else {
johnc@3175 494 bs->write_ref_field_pre((oop*)next_addr, NULL);
johnc@3175 495 }
johnc@3175 496 java_lang_ref_Reference::set_next_raw(_ref, NULL);
stefank@3115 497 } else {
johnc@3175 498 java_lang_ref_Reference::set_next(_ref, NULL);
stefank@3115 499 }
johnc@3175 500 }
johnc@3175 501
johnc@3175 502 void DiscoveredListIterator::clear_referent() {
johnc@3175 503 oop_store_raw(_referent_addr, NULL);
duke@435 504 }
duke@435 505
duke@435 506 // NOTE: process_phase*() are largely similar, and at a high level
duke@435 507 // merely iterate over the extant list applying a predicate to
duke@435 508 // each of its elements and possibly removing that element from the
duke@435 509 // list and applying some further closures to that element.
duke@435 510 // We should consider the possibility of replacing these
duke@435 511 // process_phase*() methods by abstracting them into
duke@435 512 // a single general iterator invocation that receives appropriate
duke@435 513 // closures that accomplish this work.
duke@435 514
duke@435 515 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
duke@435 516 // referents are not alive, but that should be kept alive for policy reasons.
duke@435 517 // Keep alive the transitive closure of all such referents.
duke@435 518 void
coleenp@548 519 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
duke@435 520 ReferencePolicy* policy,
duke@435 521 BoolObjectClosure* is_alive,
duke@435 522 OopClosure* keep_alive,
duke@435 523 VoidClosure* complete_gc) {
duke@435 524 assert(policy != NULL, "Must have a non-NULL policy");
coleenp@548 525 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 526 // Decide which softly reachable refs should be kept alive.
duke@435 527 while (iter.has_next()) {
duke@435 528 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
duke@435 529 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
johnc@3188 530 if (referent_is_dead &&
johnc@3188 531 !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
duke@435 532 if (TraceReferenceGC) {
duke@435 533 gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
coleenp@548 534 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 535 }
ysr@887 536 // Remove Reference object from list
ysr@887 537 iter.remove();
duke@435 538 // Make the Reference object active again
duke@435 539 iter.make_active();
duke@435 540 // keep the referent around
duke@435 541 iter.make_referent_alive();
ysr@887 542 iter.move_to_next();
duke@435 543 } else {
duke@435 544 iter.next();
duke@435 545 }
duke@435 546 }
duke@435 547 // Close the reachable set
duke@435 548 complete_gc->do_void();
duke@435 549 NOT_PRODUCT(
duke@435 550 if (PrintGCDetails && TraceReferenceGC) {
jmasa@2188 551 gclog_or_tty->print_cr(" Dropped %d dead Refs out of %d "
ysr@3117 552 "discovered Refs by policy, from list " INTPTR_FORMAT,
jmasa@2188 553 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 554 }
duke@435 555 )
duke@435 556 }
duke@435 557
duke@435 558 // Traverse the list and remove any Refs that are not active, or
duke@435 559 // whose referents are either alive or NULL.
duke@435 560 void
coleenp@548 561 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
duke@435 562 BoolObjectClosure* is_alive,
coleenp@548 563 OopClosure* keep_alive) {
duke@435 564 assert(discovery_is_atomic(), "Error");
coleenp@548 565 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 566 while (iter.has_next()) {
duke@435 567 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
coleenp@548 568 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
coleenp@548 569 assert(next == NULL, "Should not discover inactive Reference");
duke@435 570 if (iter.is_referent_alive()) {
duke@435 571 if (TraceReferenceGC) {
duke@435 572 gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
coleenp@548 573 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 574 }
duke@435 575 // The referent is reachable after all.
ysr@887 576 // Remove Reference object from list.
ysr@887 577 iter.remove();
duke@435 578 // Update the referent pointer as necessary: Note that this
duke@435 579 // should not entail any recursive marking because the
duke@435 580 // referent must already have been traversed.
duke@435 581 iter.make_referent_alive();
ysr@887 582 iter.move_to_next();
duke@435 583 } else {
duke@435 584 iter.next();
duke@435 585 }
duke@435 586 }
duke@435 587 NOT_PRODUCT(
ysr@2651 588 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
jmasa@2188 589 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
jmasa@2188 590 "Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 591 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 592 }
duke@435 593 )
duke@435 594 }
duke@435 595
duke@435 596 void
coleenp@548 597 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
coleenp@548 598 BoolObjectClosure* is_alive,
coleenp@548 599 OopClosure* keep_alive,
coleenp@548 600 VoidClosure* complete_gc) {
duke@435 601 assert(!discovery_is_atomic(), "Error");
coleenp@548 602 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 603 while (iter.has_next()) {
duke@435 604 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 605 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
coleenp@548 606 oop next = java_lang_ref_Reference::next(iter.obj());
duke@435 607 if ((iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@548 608 next != NULL)) {
coleenp@548 609 assert(next->is_oop_or_null(), "bad next field");
duke@435 610 // Remove Reference object from list
duke@435 611 iter.remove();
duke@435 612 // Trace the cohorts
duke@435 613 iter.make_referent_alive();
coleenp@548 614 if (UseCompressedOops) {
coleenp@548 615 keep_alive->do_oop((narrowOop*)next_addr);
coleenp@548 616 } else {
coleenp@548 617 keep_alive->do_oop((oop*)next_addr);
coleenp@548 618 }
ysr@887 619 iter.move_to_next();
duke@435 620 } else {
duke@435 621 iter.next();
duke@435 622 }
duke@435 623 }
duke@435 624 // Now close the newly reachable set
duke@435 625 complete_gc->do_void();
duke@435 626 NOT_PRODUCT(
ysr@2651 627 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
jmasa@2188 628 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
jmasa@2188 629 "Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 630 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 631 }
duke@435 632 )
duke@435 633 }
duke@435 634
duke@435 635 // Traverse the list and process the referents, by either
coleenp@548 636 // clearing them or keeping them (and their reachable
duke@435 637 // closure) alive.
duke@435 638 void
coleenp@548 639 ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
duke@435 640 bool clear_referent,
duke@435 641 BoolObjectClosure* is_alive,
duke@435 642 OopClosure* keep_alive,
duke@435 643 VoidClosure* complete_gc) {
jmasa@2188 644 ResourceMark rm;
coleenp@548 645 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 646 while (iter.has_next()) {
duke@435 647 iter.update_discovered();
duke@435 648 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
duke@435 649 if (clear_referent) {
duke@435 650 // NULL out referent pointer
duke@435 651 iter.clear_referent();
duke@435 652 } else {
duke@435 653 // keep the referent around
duke@435 654 iter.make_referent_alive();
duke@435 655 }
duke@435 656 if (TraceReferenceGC) {
duke@435 657 gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
duke@435 658 clear_referent ? "cleared " : "",
coleenp@548 659 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 660 }
duke@435 661 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
duke@435 662 iter.next();
duke@435 663 }
stefank@3115 664 // Remember to update the next pointer of the last ref.
duke@435 665 iter.update_discovered();
duke@435 666 // Close the reachable set
duke@435 667 complete_gc->do_void();
duke@435 668 }
duke@435 669
duke@435 670 void
stefank@3115 671 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
stefank@3115 672 oop obj = NULL;
stefank@3115 673 oop next = refs_list.head();
stefank@3115 674 while (next != obj) {
stefank@3115 675 obj = next;
stefank@3115 676 next = java_lang_ref_Reference::discovered(obj);
stefank@3115 677 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
stefank@3115 678 }
stefank@3115 679 refs_list.set_head(NULL);
stefank@3115 680 refs_list.set_length(0);
stefank@3115 681 }
stefank@3115 682
stefank@3115 683 void
coleenp@548 684 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
stefank@3115 685 clear_discovered_references(refs_list);
duke@435 686 }
duke@435 687
ysr@777 688 void ReferenceProcessor::abandon_partial_discovery() {
ysr@777 689 // loop over the lists
johnc@3175 690 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
jmasa@2188 691 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
johnc@3175 692 gclog_or_tty->print_cr("\nAbandoning %s discovered list", list_name(i));
ysr@777 693 }
ysr@777 694 abandon_partial_discovered_list(_discoveredSoftRefs[i]);
duke@435 695 }
duke@435 696 }
duke@435 697
duke@435 698 class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 699 public:
duke@435 700 RefProcPhase1Task(ReferenceProcessor& ref_processor,
duke@435 701 DiscoveredList refs_lists[],
duke@435 702 ReferencePolicy* policy,
duke@435 703 bool marks_oops_alive)
duke@435 704 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
duke@435 705 _policy(policy)
duke@435 706 { }
duke@435 707 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 708 OopClosure& keep_alive,
duke@435 709 VoidClosure& complete_gc)
duke@435 710 {
jmasa@2188 711 Thread* thr = Thread::current();
jmasa@2188 712 int refs_list_index = ((WorkerThread*)thr)->id();
jmasa@2188 713 _ref_processor.process_phase1(_refs_lists[refs_list_index], _policy,
duke@435 714 &is_alive, &keep_alive, &complete_gc);
duke@435 715 }
duke@435 716 private:
duke@435 717 ReferencePolicy* _policy;
duke@435 718 };
duke@435 719
duke@435 720 class RefProcPhase2Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 721 public:
duke@435 722 RefProcPhase2Task(ReferenceProcessor& ref_processor,
duke@435 723 DiscoveredList refs_lists[],
duke@435 724 bool marks_oops_alive)
duke@435 725 : ProcessTask(ref_processor, refs_lists, marks_oops_alive)
duke@435 726 { }
duke@435 727 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 728 OopClosure& keep_alive,
duke@435 729 VoidClosure& complete_gc)
duke@435 730 {
duke@435 731 _ref_processor.process_phase2(_refs_lists[i],
duke@435 732 &is_alive, &keep_alive, &complete_gc);
duke@435 733 }
duke@435 734 };
duke@435 735
duke@435 736 class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 737 public:
duke@435 738 RefProcPhase3Task(ReferenceProcessor& ref_processor,
duke@435 739 DiscoveredList refs_lists[],
duke@435 740 bool clear_referent,
duke@435 741 bool marks_oops_alive)
duke@435 742 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
duke@435 743 _clear_referent(clear_referent)
duke@435 744 { }
duke@435 745 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 746 OopClosure& keep_alive,
duke@435 747 VoidClosure& complete_gc)
duke@435 748 {
jmasa@2188 749 // Don't use "refs_list_index" calculated in this way because
jmasa@2188 750 // balance_queues() has moved the Ref's into the first n queues.
jmasa@2188 751 // Thread* thr = Thread::current();
jmasa@2188 752 // int refs_list_index = ((WorkerThread*)thr)->id();
jmasa@2188 753 // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
duke@435 754 _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
duke@435 755 &is_alive, &keep_alive, &complete_gc);
duke@435 756 }
duke@435 757 private:
duke@435 758 bool _clear_referent;
duke@435 759 };
duke@435 760
johnc@3175 761 void ReferenceProcessor::set_discovered(oop ref, oop value) {
johnc@3175 762 if (_discovered_list_needs_barrier) {
johnc@3175 763 java_lang_ref_Reference::set_discovered(ref, value);
johnc@3175 764 } else {
johnc@3175 765 java_lang_ref_Reference::set_discovered_raw(ref, value);
johnc@3175 766 }
johnc@3175 767 }
johnc@3175 768
duke@435 769 // Balances reference queues.
jmasa@2188 770 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
jmasa@2188 771 // queues[0, 1, ..., _num_q-1] because only the first _num_q
jmasa@2188 772 // corresponding to the active workers will be processed.
duke@435 773 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
duke@435 774 {
duke@435 775 // calculate total length
duke@435 776 size_t total_refs = 0;
jmasa@2188 777 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 778 gclog_or_tty->print_cr("\nBalance ref_lists ");
jmasa@2188 779 }
jmasa@2188 780
jmasa@2188 781 for (int i = 0; i < _max_num_q; ++i) {
duke@435 782 total_refs += ref_lists[i].length();
jmasa@2188 783 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 784 gclog_or_tty->print("%d ", ref_lists[i].length());
jmasa@2188 785 }
jmasa@2188 786 }
jmasa@2188 787 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 788 gclog_or_tty->print_cr(" = %d", total_refs);
duke@435 789 }
duke@435 790 size_t avg_refs = total_refs / _num_q + 1;
duke@435 791 int to_idx = 0;
jmasa@2188 792 for (int from_idx = 0; from_idx < _max_num_q; from_idx++) {
jmasa@2188 793 bool move_all = false;
jmasa@2188 794 if (from_idx >= _num_q) {
jmasa@2188 795 move_all = ref_lists[from_idx].length() > 0;
jmasa@2188 796 }
jmasa@2188 797 while ((ref_lists[from_idx].length() > avg_refs) ||
jmasa@2188 798 move_all) {
duke@435 799 assert(to_idx < _num_q, "Sanity Check!");
duke@435 800 if (ref_lists[to_idx].length() < avg_refs) {
duke@435 801 // move superfluous refs
jmasa@2188 802 size_t refs_to_move;
jmasa@2188 803 // Move all the Ref's if the from queue will not be processed.
jmasa@2188 804 if (move_all) {
jmasa@2188 805 refs_to_move = MIN2(ref_lists[from_idx].length(),
jmasa@2188 806 avg_refs - ref_lists[to_idx].length());
jmasa@2188 807 } else {
jmasa@2188 808 refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
jmasa@2188 809 avg_refs - ref_lists[to_idx].length());
jmasa@2188 810 }
stefank@3115 811
stefank@3115 812 assert(refs_to_move > 0, "otherwise the code below will fail");
stefank@3115 813
duke@435 814 oop move_head = ref_lists[from_idx].head();
duke@435 815 oop move_tail = move_head;
duke@435 816 oop new_head = move_head;
duke@435 817 // find an element to split the list on
duke@435 818 for (size_t j = 0; j < refs_to_move; ++j) {
duke@435 819 move_tail = new_head;
coleenp@548 820 new_head = java_lang_ref_Reference::discovered(new_head);
duke@435 821 }
stefank@3115 822
stefank@3115 823 // Add the chain to the to list.
stefank@3115 824 if (ref_lists[to_idx].head() == NULL) {
stefank@3115 825 // to list is empty. Make a loop at the end.
johnc@3175 826 set_discovered(move_tail, move_tail);
stefank@3115 827 } else {
johnc@3175 828 set_discovered(move_tail, ref_lists[to_idx].head());
stefank@3115 829 }
duke@435 830 ref_lists[to_idx].set_head(move_head);
ysr@887 831 ref_lists[to_idx].inc_length(refs_to_move);
stefank@3115 832
stefank@3115 833 // Remove the chain from the from list.
stefank@3115 834 if (move_tail == new_head) {
stefank@3115 835 // We found the end of the from list.
stefank@3115 836 ref_lists[from_idx].set_head(NULL);
stefank@3115 837 } else {
stefank@3115 838 ref_lists[from_idx].set_head(new_head);
stefank@3115 839 }
ysr@887 840 ref_lists[from_idx].dec_length(refs_to_move);
jmasa@2188 841 if (ref_lists[from_idx].length() == 0) {
jmasa@2188 842 break;
jmasa@2188 843 }
duke@435 844 } else {
jmasa@2188 845 to_idx = (to_idx + 1) % _num_q;
duke@435 846 }
duke@435 847 }
duke@435 848 }
jmasa@2188 849 #ifdef ASSERT
jmasa@2188 850 size_t balanced_total_refs = 0;
jmasa@2188 851 for (int i = 0; i < _max_num_q; ++i) {
jmasa@2188 852 balanced_total_refs += ref_lists[i].length();
jmasa@2188 853 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 854 gclog_or_tty->print("%d ", ref_lists[i].length());
jmasa@2188 855 }
jmasa@2188 856 }
jmasa@2188 857 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 858 gclog_or_tty->print_cr(" = %d", balanced_total_refs);
jmasa@2188 859 gclog_or_tty->flush();
jmasa@2188 860 }
jmasa@2188 861 assert(total_refs == balanced_total_refs, "Balancing was incomplete");
jmasa@2188 862 #endif
jmasa@2188 863 }
jmasa@2188 864
jmasa@2188 865 void ReferenceProcessor::balance_all_queues() {
jmasa@2188 866 balance_queues(_discoveredSoftRefs);
jmasa@2188 867 balance_queues(_discoveredWeakRefs);
jmasa@2188 868 balance_queues(_discoveredFinalRefs);
jmasa@2188 869 balance_queues(_discoveredPhantomRefs);
duke@435 870 }
duke@435 871
duke@435 872 void
duke@435 873 ReferenceProcessor::process_discovered_reflist(
duke@435 874 DiscoveredList refs_lists[],
duke@435 875 ReferencePolicy* policy,
duke@435 876 bool clear_referent,
duke@435 877 BoolObjectClosure* is_alive,
duke@435 878 OopClosure* keep_alive,
duke@435 879 VoidClosure* complete_gc,
duke@435 880 AbstractRefProcTaskExecutor* task_executor)
duke@435 881 {
jmasa@2188 882 bool mt_processing = task_executor != NULL && _processing_is_mt;
jmasa@2188 883 // If discovery used MT and a dynamic number of GC threads, then
jmasa@2188 884 // the queues must be balanced for correctness if fewer than the
jmasa@2188 885 // maximum number of queues were used. The number of queues used
jmasa@2188 886 // during discovery may be different from the number to be used
jmasa@2188 887 // for processing, so don't depend on _num_q < _max_num_q as part
jmasa@2188 888 // of the test.
jmasa@2188 889 bool must_balance = _discovery_is_mt;
jmasa@2188 890
jmasa@2188 891 if ((mt_processing && ParallelRefProcBalancingEnabled) ||
jmasa@2188 892 must_balance) {
duke@435 893 balance_queues(refs_lists);
duke@435 894 }
duke@435 895 if (PrintReferenceGC && PrintGCDetails) {
duke@435 896 size_t total = 0;
ysr@2651 897 for (int i = 0; i < _max_num_q; ++i) {
duke@435 898 total += refs_lists[i].length();
duke@435 899 }
duke@435 900 gclog_or_tty->print(", %u refs", total);
duke@435 901 }
duke@435 902
duke@435 903 // Phase 1 (soft refs only):
duke@435 904 // . Traverse the list and remove any SoftReferences whose
duke@435 905 // referents are not alive, but that should be kept alive for
duke@435 906 // policy reasons. Keep alive the transitive closure of all
duke@435 907 // such referents.
duke@435 908 if (policy != NULL) {
jmasa@2188 909 if (mt_processing) {
duke@435 910 RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/);
duke@435 911 task_executor->execute(phase1);
duke@435 912 } else {
ysr@2651 913 for (int i = 0; i < _max_num_q; i++) {
duke@435 914 process_phase1(refs_lists[i], policy,
duke@435 915 is_alive, keep_alive, complete_gc);
duke@435 916 }
duke@435 917 }
duke@435 918 } else { // policy == NULL
duke@435 919 assert(refs_lists != _discoveredSoftRefs,
duke@435 920 "Policy must be specified for soft references.");
duke@435 921 }
duke@435 922
duke@435 923 // Phase 2:
duke@435 924 // . Traverse the list and remove any refs whose referents are alive.
jmasa@2188 925 if (mt_processing) {
duke@435 926 RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/);
duke@435 927 task_executor->execute(phase2);
duke@435 928 } else {
ysr@2651 929 for (int i = 0; i < _max_num_q; i++) {
duke@435 930 process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
duke@435 931 }
duke@435 932 }
duke@435 933
duke@435 934 // Phase 3:
duke@435 935 // . Traverse the list and process referents as appropriate.
jmasa@2188 936 if (mt_processing) {
duke@435 937 RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/);
duke@435 938 task_executor->execute(phase3);
duke@435 939 } else {
ysr@2651 940 for (int i = 0; i < _max_num_q; i++) {
duke@435 941 process_phase3(refs_lists[i], clear_referent,
duke@435 942 is_alive, keep_alive, complete_gc);
duke@435 943 }
duke@435 944 }
duke@435 945 }
duke@435 946
duke@435 947 void ReferenceProcessor::clean_up_discovered_references() {
duke@435 948 // loop over the lists
johnc@3175 949 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
ysr@2651 950 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
duke@435 951 gclog_or_tty->print_cr(
duke@435 952 "\nScrubbing %s discovered list of Null referents",
duke@435 953 list_name(i));
duke@435 954 }
duke@435 955 clean_up_discovered_reflist(_discoveredSoftRefs[i]);
duke@435 956 }
duke@435 957 }
duke@435 958
duke@435 959 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
duke@435 960 assert(!discovery_is_atomic(), "Else why call this method?");
duke@435 961 DiscoveredListIterator iter(refs_list, NULL, NULL);
duke@435 962 while (iter.has_next()) {
duke@435 963 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 964 oop next = java_lang_ref_Reference::next(iter.obj());
coleenp@548 965 assert(next->is_oop_or_null(), "bad next field");
duke@435 966 // If referent has been cleared or Reference is not active,
duke@435 967 // drop it.
coleenp@548 968 if (iter.referent() == NULL || next != NULL) {
duke@435 969 debug_only(
duke@435 970 if (PrintGCDetails && TraceReferenceGC) {
duke@435 971 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
duke@435 972 INTPTR_FORMAT " with next field: " INTPTR_FORMAT
duke@435 973 " and referent: " INTPTR_FORMAT,
coleenp@548 974 iter.obj(), next, iter.referent());
duke@435 975 }
duke@435 976 )
duke@435 977 // Remove Reference object from list
duke@435 978 iter.remove();
ysr@887 979 iter.move_to_next();
duke@435 980 } else {
duke@435 981 iter.next();
duke@435 982 }
duke@435 983 }
duke@435 984 NOT_PRODUCT(
duke@435 985 if (PrintGCDetails && TraceReferenceGC) {
duke@435 986 gclog_or_tty->print(
duke@435 987 " Removed %d Refs with NULL referents out of %d discovered Refs",
duke@435 988 iter.removed(), iter.processed());
duke@435 989 }
duke@435 990 )
duke@435 991 }
duke@435 992
duke@435 993 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
duke@435 994 int id = 0;
duke@435 995 // Determine the queue index to use for this object.
duke@435 996 if (_discovery_is_mt) {
duke@435 997 // During a multi-threaded discovery phase,
duke@435 998 // each thread saves to its "own" list.
duke@435 999 Thread* thr = Thread::current();
johnc@2316 1000 id = thr->as_Worker_thread()->id();
duke@435 1001 } else {
duke@435 1002 // single-threaded discovery, we save in round-robin
duke@435 1003 // fashion to each of the lists.
duke@435 1004 if (_processing_is_mt) {
duke@435 1005 id = next_id();
duke@435 1006 }
duke@435 1007 }
jmasa@2188 1008 assert(0 <= id && id < _max_num_q, "Id is out-of-bounds (call Freud?)");
duke@435 1009
duke@435 1010 // Get the discovered queue to which we will add
duke@435 1011 DiscoveredList* list = NULL;
duke@435 1012 switch (rt) {
duke@435 1013 case REF_OTHER:
duke@435 1014 // Unknown reference type, no special treatment
duke@435 1015 break;
duke@435 1016 case REF_SOFT:
duke@435 1017 list = &_discoveredSoftRefs[id];
duke@435 1018 break;
duke@435 1019 case REF_WEAK:
duke@435 1020 list = &_discoveredWeakRefs[id];
duke@435 1021 break;
duke@435 1022 case REF_FINAL:
duke@435 1023 list = &_discoveredFinalRefs[id];
duke@435 1024 break;
duke@435 1025 case REF_PHANTOM:
duke@435 1026 list = &_discoveredPhantomRefs[id];
duke@435 1027 break;
duke@435 1028 case REF_NONE:
duke@435 1029 // we should not reach here if we are an instanceRefKlass
duke@435 1030 default:
duke@435 1031 ShouldNotReachHere();
duke@435 1032 }
jmasa@2188 1033 if (TraceReferenceGC && PrintGCDetails) {
johnc@2316 1034 gclog_or_tty->print_cr("Thread %d gets list " INTPTR_FORMAT, id, list);
jmasa@2188 1035 }
duke@435 1036 return list;
duke@435 1037 }
duke@435 1038
coleenp@548 1039 inline void
coleenp@548 1040 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
coleenp@548 1041 oop obj,
coleenp@548 1042 HeapWord* discovered_addr) {
duke@435 1043 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
duke@435 1044 // First we must make sure this object is only enqueued once. CAS in a non null
duke@435 1045 // discovered_addr.
ysr@777 1046 oop current_head = refs_list.head();
stefank@3115 1047 // The last ref must have its discovered field pointing to itself.
stefank@3115 1048 oop next_discovered = (current_head != NULL) ? current_head : obj;
ysr@777 1049
ysr@1280 1050 // Note: In the case of G1, this specific pre-barrier is strictly
ysr@777 1051 // not necessary because the only case we are interested in
ysr@1280 1052 // here is when *discovered_addr is NULL (see the CAS further below),
ysr@1280 1053 // so this will expand to nothing. As a result, we have manually
ysr@1280 1054 // elided this out for G1, but left in the test for some future
ysr@3117 1055 // collector that might have need for a pre-barrier here, e.g.:-
ysr@3117 1056 // _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
ysr@3117 1057 assert(!_discovered_list_needs_barrier || UseG1GC,
ysr@3117 1058 "Need to check non-G1 collector: "
ysr@3117 1059 "may need a pre-write-barrier for CAS from NULL below");
stefank@3115 1060 oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
coleenp@548 1061 NULL);
duke@435 1062 if (retest == NULL) {
duke@435 1063 // This thread just won the right to enqueue the object.
ysr@3117 1064 // We have separate lists for enqueueing, so no synchronization
duke@435 1065 // is necessary.
coleenp@548 1066 refs_list.set_head(obj);
ysr@887 1067 refs_list.inc_length(1);
ysr@777 1068 if (_discovered_list_needs_barrier) {
stefank@3115 1069 _bs->write_ref_field((void*)discovered_addr, next_discovered);
ysr@777 1070 }
johnc@2316 1071
johnc@2316 1072 if (TraceReferenceGC) {
ysr@3117 1073 gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
johnc@2316 1074 obj, obj->blueprint()->internal_name());
johnc@2316 1075 }
duke@435 1076 } else {
duke@435 1077 // If retest was non NULL, another thread beat us to it:
duke@435 1078 // The reference has already been discovered...
duke@435 1079 if (TraceReferenceGC) {
ysr@3117 1080 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
duke@435 1081 obj, obj->blueprint()->internal_name());
duke@435 1082 }
duke@435 1083 }
duke@435 1084 }
duke@435 1085
ysr@2337 1086 #ifndef PRODUCT
ysr@2337 1087 // Non-atomic (i.e. concurrent) discovery might allow us
ysr@2337 1088 // to observe j.l.References with NULL referents, being those
ysr@2337 1089 // cleared concurrently by mutators during (or after) discovery.
ysr@2337 1090 void ReferenceProcessor::verify_referent(oop obj) {
ysr@2337 1091 bool da = discovery_is_atomic();
ysr@2337 1092 oop referent = java_lang_ref_Reference::referent(obj);
ysr@2337 1093 assert(da ? referent->is_oop() : referent->is_oop_or_null(),
ysr@2337 1094 err_msg("Bad referent " INTPTR_FORMAT " found in Reference "
ysr@2337 1095 INTPTR_FORMAT " during %satomic discovery ",
ysr@2337 1096 (intptr_t)referent, (intptr_t)obj, da ? "" : "non-"));
ysr@2337 1097 }
ysr@2337 1098 #endif
ysr@2337 1099
duke@435 1100 // We mention two of several possible choices here:
duke@435 1101 // #0: if the reference object is not in the "originating generation"
duke@435 1102 // (or part of the heap being collected, indicated by our "span"),
duke@435 1103 // we don't treat it specially (i.e. we scan it as we would
duke@435 1104 // a normal oop, treating its references as strong references).
ysr@3117 1105 // This means that references can't be discovered unless their
duke@435 1106 // referent is also in the same span. This is the simplest,
duke@435 1107 // most "local" and most conservative approach, albeit one
duke@435 1108 // that may cause weak references to be enqueued least promptly.
duke@435 1109 // We call this choice the "ReferenceBasedDiscovery" policy.
duke@435 1110 // #1: the reference object may be in any generation (span), but if
duke@435 1111 // the referent is in the generation (span) being currently collected
duke@435 1112 // then we can discover the reference object, provided
duke@435 1113 // the object has not already been discovered by
duke@435 1114 // a different concurrently running collector (as may be the
duke@435 1115 // case, for instance, if the reference object is in CMS and
duke@435 1116 // the referent in DefNewGeneration), and provided the processing
duke@435 1117 // of this reference object by the current collector will
duke@435 1118 // appear atomic to every other collector in the system.
duke@435 1119 // (Thus, for instance, a concurrent collector may not
duke@435 1120 // discover references in other generations even if the
duke@435 1121 // referent is in its own generation). This policy may,
duke@435 1122 // in certain cases, enqueue references somewhat sooner than
duke@435 1123 // might Policy #0 above, but at marginally increased cost
duke@435 1124 // and complexity in processing these references.
duke@435 1125 // We call this choice the "ReferentBasedDiscovery" policy.
duke@435 1126 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
ysr@3117 1127 // Make sure we are discovering refs (rather than processing discovered refs).
duke@435 1128 if (!_discovering_refs || !RegisterReferences) {
duke@435 1129 return false;
duke@435 1130 }
ysr@3117 1131 // We only discover active references.
coleenp@548 1132 oop next = java_lang_ref_Reference::next(obj);
ysr@3117 1133 if (next != NULL) { // Ref is no longer active
duke@435 1134 return false;
duke@435 1135 }
duke@435 1136
duke@435 1137 HeapWord* obj_addr = (HeapWord*)obj;
duke@435 1138 if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
duke@435 1139 !_span.contains(obj_addr)) {
duke@435 1140 // Reference is not in the originating generation;
duke@435 1141 // don't treat it specially (i.e. we want to scan it as a normal
duke@435 1142 // object with strong references).
duke@435 1143 return false;
duke@435 1144 }
duke@435 1145
ysr@3117 1146 // We only discover references whose referents are not (yet)
ysr@3117 1147 // known to be strongly reachable.
duke@435 1148 if (is_alive_non_header() != NULL) {
ysr@2337 1149 verify_referent(obj);
ysr@2337 1150 if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
duke@435 1151 return false; // referent is reachable
duke@435 1152 }
duke@435 1153 }
ysr@888 1154 if (rt == REF_SOFT) {
ysr@888 1155 // For soft refs we can decide now if these are not
ysr@888 1156 // current candidates for clearing, in which case we
ysr@888 1157 // can mark through them now, rather than delaying that
ysr@888 1158 // to the reference-processing phase. Since all current
ysr@888 1159 // time-stamp policies advance the soft-ref clock only
ysr@888 1160 // at a major collection cycle, this is always currently
ysr@888 1161 // accurate.
johnc@3188 1162 if (!_current_soft_ref_policy->should_clear_reference(obj, _soft_ref_timestamp_clock)) {
ysr@888 1163 return false;
ysr@888 1164 }
ysr@888 1165 }
duke@435 1166
johnc@3175 1167 ResourceMark rm; // Needed for tracing.
johnc@3175 1168
ysr@777 1169 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
ysr@777 1170 const oop discovered = java_lang_ref_Reference::discovered(obj);
coleenp@548 1171 assert(discovered->is_oop_or_null(), "bad discovered field");
coleenp@548 1172 if (discovered != NULL) {
duke@435 1173 // The reference has already been discovered...
duke@435 1174 if (TraceReferenceGC) {
ysr@3117 1175 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
coleenp@548 1176 obj, obj->blueprint()->internal_name());
duke@435 1177 }
duke@435 1178 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
duke@435 1179 // assumes that an object is not processed twice;
duke@435 1180 // if it's been already discovered it must be on another
duke@435 1181 // generation's discovered list; so we won't discover it.
duke@435 1182 return false;
duke@435 1183 } else {
duke@435 1184 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
duke@435 1185 "Unrecognized policy");
duke@435 1186 // Check assumption that an object is not potentially
duke@435 1187 // discovered twice except by concurrent collectors that potentially
duke@435 1188 // trace the same Reference object twice.
johnc@2316 1189 assert(UseConcMarkSweepGC || UseG1GC,
johnc@2316 1190 "Only possible with a concurrent marking collector");
duke@435 1191 return true;
duke@435 1192 }
duke@435 1193 }
duke@435 1194
duke@435 1195 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
ysr@2337 1196 verify_referent(obj);
ysr@3117 1197 // Discover if and only if EITHER:
ysr@3117 1198 // .. reference is in our span, OR
ysr@3117 1199 // .. we are an atomic collector and referent is in our span
duke@435 1200 if (_span.contains(obj_addr) ||
ysr@2337 1201 (discovery_is_atomic() &&
ysr@2337 1202 _span.contains(java_lang_ref_Reference::referent(obj)))) {
duke@435 1203 // should_enqueue = true;
duke@435 1204 } else {
duke@435 1205 return false;
duke@435 1206 }
duke@435 1207 } else {
duke@435 1208 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
duke@435 1209 _span.contains(obj_addr), "code inconsistency");
duke@435 1210 }
duke@435 1211
duke@435 1212 // Get the right type of discovered queue head.
duke@435 1213 DiscoveredList* list = get_discovered_list(rt);
duke@435 1214 if (list == NULL) {
duke@435 1215 return false; // nothing special needs to be done
duke@435 1216 }
duke@435 1217
duke@435 1218 if (_discovery_is_mt) {
duke@435 1219 add_to_discovered_list_mt(*list, obj, discovered_addr);
duke@435 1220 } else {
ysr@777 1221 // If "_discovered_list_needs_barrier", we do write barriers when
ysr@777 1222 // updating the discovered reference list. Otherwise, we do a raw store
ysr@777 1223 // here: the field will be visited later when processing the discovered
ysr@777 1224 // references.
ysr@777 1225 oop current_head = list->head();
stefank@3115 1226 // The last ref must have its discovered field pointing to itself.
stefank@3115 1227 oop next_discovered = (current_head != NULL) ? current_head : obj;
stefank@3115 1228
ysr@777 1229 // As in the case further above, since we are over-writing a NULL
ysr@777 1230 // pre-value, we can safely elide the pre-barrier here for the case of G1.
ysr@3117 1231 // e.g.:- _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
ysr@777 1232 assert(discovered == NULL, "control point invariant");
ysr@3117 1233 assert(!_discovered_list_needs_barrier || UseG1GC,
ysr@3117 1234 "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
stefank@3115 1235 oop_store_raw(discovered_addr, next_discovered);
ysr@777 1236 if (_discovered_list_needs_barrier) {
stefank@3115 1237 _bs->write_ref_field((void*)discovered_addr, next_discovered);
ysr@777 1238 }
duke@435 1239 list->set_head(obj);
ysr@887 1240 list->inc_length(1);
duke@435 1241
johnc@2316 1242 if (TraceReferenceGC) {
ysr@3117 1243 gclog_or_tty->print_cr("Discovered reference (" INTPTR_FORMAT ": %s)",
johnc@2316 1244 obj, obj->blueprint()->internal_name());
duke@435 1245 }
duke@435 1246 }
ysr@3117 1247 assert(obj->is_oop(), "Discovered a bad reference");
ysr@2337 1248 verify_referent(obj);
duke@435 1249 return true;
duke@435 1250 }
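// ---------------------------------------------------------------------------
// [Editor's sketch] The single-threaded branch above prepends the Reference
// to its DiscoveredList and terminates the chain by making the very first
// element's discovered field point to itself, so a non-empty list never
// contains NULL. A stand-alone illustration of that convention; SketchRef
// and SketchList are hypothetical stand-ins for oop and DiscoveredList:

#include <cstddef>  // NULL, size_t (for the sketch only)

struct SketchRef {
  SketchRef* discovered;  // plays the role of java.lang.ref.Reference.discovered
};

struct SketchList {
  SketchRef* head;
  size_t     length;
};

// Prepend obj to list, self-looping the first element ever inserted.
static void sketch_push_discovered(SketchList& list, SketchRef* obj) {
  SketchRef* current_head = list.head;
  // The last ref must have its discovered field pointing to itself.
  SketchRef* next_discovered = (current_head != NULL) ? current_head : obj;
  obj->discovered = next_discovered;  // raw store; the real code may also
                                      // need a post-write barrier here
  list.head = obj;
  list.length += 1;
}
// ---------------------------------------------------------------------------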
duke@435 1251
duke@435 1252 // Preclean the discovered references by removing those
duke@435 1253 // whose referents are alive, and by marking from those that
duke@435 1254 // are not active. These lists can be handled here
duke@435 1255 // in any order and, indeed, concurrently.
duke@435 1256 void ReferenceProcessor::preclean_discovered_references(
duke@435 1257 BoolObjectClosure* is_alive,
duke@435 1258 OopClosure* keep_alive,
duke@435 1259 VoidClosure* complete_gc,
jmasa@1625 1260 YieldClosure* yield,
jmasa@1625 1261 bool should_unload_classes) {
duke@435 1262
duke@435 1263 NOT_PRODUCT(verify_ok_to_handle_reflists());
duke@435 1264
jmasa@1370 1265 #ifdef ASSERT
jmasa@1370 1266   bool must_remember_klasses = (ClassUnloading && !UseConcMarkSweepGC) ||
jmasa@1625 1267     (CMSClassUnloadingEnabled && UseConcMarkSweepGC) ||
jmasa@1625 1268     (ExplicitGCInvokesConcurrentAndUnloadsClasses &&
jmasa@1625 1269      UseConcMarkSweepGC && should_unload_classes);
jmasa@1370 1270 RememberKlassesChecker mx(must_remember_klasses);
jmasa@1370 1271 #endif
duke@435 1272 // Soft references
duke@435 1273 {
duke@435 1274 TraceTime tt("Preclean SoftReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1275 false, gclog_or_tty);
jmasa@2188 1276 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1277 if (yield->should_return()) {
ysr@887 1278 return;
ysr@887 1279 }
duke@435 1280 preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
duke@435 1281 keep_alive, complete_gc, yield);
duke@435 1282 }
duke@435 1283 }
duke@435 1284
duke@435 1285 // Weak references
duke@435 1286 {
duke@435 1287 TraceTime tt("Preclean WeakReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1288 false, gclog_or_tty);
ysr@2651 1289 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1290 if (yield->should_return()) {
ysr@887 1291 return;
ysr@887 1292 }
duke@435 1293 preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
duke@435 1294 keep_alive, complete_gc, yield);
duke@435 1295 }
duke@435 1296 }
duke@435 1297
duke@435 1298 // Final references
duke@435 1299 {
duke@435 1300 TraceTime tt("Preclean FinalReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1301 false, gclog_or_tty);
ysr@2651 1302 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1303 if (yield->should_return()) {
ysr@887 1304 return;
ysr@887 1305 }
duke@435 1306 preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
duke@435 1307 keep_alive, complete_gc, yield);
duke@435 1308 }
duke@435 1309 }
duke@435 1310
duke@435 1311 // Phantom references
duke@435 1312 {
duke@435 1313 TraceTime tt("Preclean PhantomReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1314 false, gclog_or_tty);
ysr@2651 1315 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1316 if (yield->should_return()) {
ysr@887 1317 return;
ysr@887 1318 }
duke@435 1319 preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
duke@435 1320 keep_alive, complete_gc, yield);
duke@435 1321 }
duke@435 1322 }
duke@435 1323 }
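// ---------------------------------------------------------------------------
// [Editor's sketch] Each of the four blocks above re-checks the yield
// closure before every queue, so a concurrent collector can abandon
// precleaning between queues. The loop shape, with hypothetical sketch_*
// names standing in for the real closures:

struct SketchYield {
  virtual bool should_return() = 0;  // true => caller should stop precleaning
};

// Returns false if precleaning was abandoned due to a yield request.
static bool sketch_preclean_queues(SketchYield* yield, int max_num_q) {
  for (int i = 0; i < max_num_q; i++) {
    if (yield->should_return()) {
      return false;  // cooperative early exit, as in the code above
    }
    // ... preclean queue i here ...
  }
  return true;
}
// ---------------------------------------------------------------------------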
duke@435 1324
duke@435 1325 // Walk the given discovered ref list, and remove all reference objects
duke@435 1326 // whose referents are still alive, whose referents are NULL, or which
ysr@887 1327 // are not active (have a non-NULL next field). NOTE: When we are
ysr@887 1328 // thus precleaning the ref lists (which happens single-threaded today),
ysr@887 1329 // we do not disable refs discovery to honour the correct semantics of
ysr@887 1330 // java.lang.Reference. As a result, we need to be careful below
ysr@887 1331 // that ref removal steps interleave safely with ref discovery steps
ysr@887 1332 // (in this thread).
coleenp@548 1333 void
coleenp@548 1334 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
coleenp@548 1335 BoolObjectClosure* is_alive,
coleenp@548 1336 OopClosure* keep_alive,
coleenp@548 1337 VoidClosure* complete_gc,
coleenp@548 1338 YieldClosure* yield) {
duke@435 1339 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 1340 while (iter.has_next()) {
duke@435 1341 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 1342 oop obj = iter.obj();
coleenp@548 1343 oop next = java_lang_ref_Reference::next(obj);
duke@435 1344 if (iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@548 1345 next != NULL) {
duke@435 1346 // The referent has been cleared, or is alive, or the Reference is not
duke@435 1347 // active; we need to trace and mark its cohort.
duke@435 1348 if (TraceReferenceGC) {
duke@435 1349 gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
duke@435 1350 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 1351 }
duke@435 1352 // Remove Reference object from list
duke@435 1353 iter.remove();
duke@435 1354 // Keep alive its cohort.
duke@435 1355 iter.make_referent_alive();
coleenp@548 1356 if (UseCompressedOops) {
coleenp@548 1357 narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 1358 keep_alive->do_oop(next_addr);
coleenp@548 1359 } else {
coleenp@548 1360 oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 1361 keep_alive->do_oop(next_addr);
coleenp@548 1362 }
ysr@887 1363 iter.move_to_next();
duke@435 1364 } else {
duke@435 1365 iter.next();
duke@435 1366 }
duke@435 1367 }
duke@435 1368 // Close the reachable set
duke@435 1369 complete_gc->do_void();
duke@435 1370
duke@435 1371 NOT_PRODUCT(
ysr@2651 1372 if (PrintGCDetails && PrintReferenceGC && (iter.processed() > 0)) {
jmasa@2188 1373 gclog_or_tty->print_cr(" Dropped %d Refs out of %d "
jmasa@2188 1374 "Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 1375 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 1376 }
duke@435 1377 )
duke@435 1378 }
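// ---------------------------------------------------------------------------
// [Editor's sketch] The filter applied by preclean_discovered_reflist(),
// collected into a single hypothetical predicate: a discovered Reference is
// dropped from the list (with its referent and next fields kept alive) when
// the referent has been cleared, the referent is still reachable, or the
// Reference is no longer active:

struct SketchReference {
  void* referent;  // NULL once the reference has been cleared
  void* next;      // non-NULL once the reference is no longer active
};

// is_alive stands in for the BoolObjectClosure used above.
static bool sketch_should_drop(const SketchReference* ref,
                               bool (*is_alive)(void* obj)) {
  return ref->referent == NULL     // referent already cleared
      || is_alive(ref->referent)   // referent strongly reachable
      || ref->next != NULL;        // reference no longer active
}
// ---------------------------------------------------------------------------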
duke@435 1379
duke@435 1380 const char* ReferenceProcessor::list_name(int i) {
johnc@3175 1381   assert(i >= 0 && i < _max_num_q * number_of_subclasses_of_ref(),
johnc@3175 1382 "Out of bounds index");
johnc@3175 1383
jmasa@2188 1384 int j = i / _max_num_q;
duke@435 1385 switch (j) {
duke@435 1386 case 0: return "SoftRef";
duke@435 1387 case 1: return "WeakRef";
duke@435 1388 case 2: return "FinalRef";
duke@435 1389 case 3: return "PhantomRef";
duke@435 1390 }
duke@435 1391 ShouldNotReachHere();
duke@435 1392 return NULL;
duke@435 1393 }
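// ---------------------------------------------------------------------------
// [Editor's note] The discovered lists form number_of_subclasses_of_ref()
// consecutive blocks of _max_num_q queues each, in the fixed order Soft,
// Weak, Final, Phantom, so integer division recovers the type. For example,
// with _max_num_q == 4, index 5 maps to block 5 / 4 == 1 ("WeakRef") and
// index 13 to block 13 / 4 == 3 ("PhantomRef"). A hypothetical one-liner:

static const char* sketch_list_name(int i, int max_num_q) {
  static const char* names[] = { "SoftRef", "WeakRef", "FinalRef", "PhantomRef" };
  return names[i / max_num_q];  // caller must ensure 0 <= i < 4 * max_num_q
}
// ---------------------------------------------------------------------------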
duke@435 1394
duke@435 1395 #ifndef PRODUCT
duke@435 1396 void ReferenceProcessor::verify_ok_to_handle_reflists() {
duke@435 1397 // empty for now
duke@435 1398 }
duke@435 1399 #endif
duke@435 1400
duke@435 1401 #ifndef PRODUCT
duke@435 1402 void ReferenceProcessor::clear_discovered_references() {
duke@435 1403 guarantee(!_discovering_refs, "Discovering refs?");
johnc@3175 1404 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
stefank@3115 1405 clear_discovered_references(_discoveredSoftRefs[i]);
duke@435 1406 }
duke@435 1407 }
stefank@3115 1408
duke@435 1409 #endif // PRODUCT
