src/share/vm/memory/referenceProcessor.cpp

author:      johnc
date:        Wed, 01 Dec 2010 17:34:02 -0800
changeset:   2316:fd1d227ef1b9
parent:      2314:f95d63e2154a
child:       2337:8df09fb45352
permissions: -rw-r--r--

6983204: G1: Nightly test nsk/regression/b4958615 failing with +ExplicitGCInvokesConcurrent
Summary: Enable reference discovery during concurrent marking by setting the reference processor field of the concurrent marking closure. Keep reference objects on the discovered reference lists alive during incremental evacuation pauses until they are processed at the end of concurrent marking.
Reviewed-by: ysr, tonyp

duke@435 1 /*
jmasa@2188 2 * Copyright (c) 2001, 2010, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
stefank@2314 26 #include "classfile/javaClasses.hpp"
stefank@2314 27 #include "classfile/systemDictionary.hpp"
stefank@2314 28 #include "gc_interface/collectedHeap.hpp"
stefank@2314 29 #include "gc_interface/collectedHeap.inline.hpp"
stefank@2314 30 #include "memory/referencePolicy.hpp"
stefank@2314 31 #include "memory/referenceProcessor.hpp"
stefank@2314 32 #include "oops/oop.inline.hpp"
stefank@2314 33 #include "runtime/java.hpp"
stefank@2314 34 #include "runtime/jniHandles.hpp"
duke@435 35
ysr@888 36 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
ysr@888 37 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL;
ysr@888 38 oop ReferenceProcessor::_sentinelRef = NULL;
ysr@888 39 const int subclasses_of_ref = REF_PHANTOM - REF_OTHER;
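// (REF_PHANTOM - REF_OTHER relies on the ReferenceType enum ordering the
// concrete subclasses contiguously: REF_OTHER, REF_SOFT, REF_WEAK,
// REF_FINAL, REF_PHANTOM -- yielding 4, one list slot per concrete
// java.lang.ref.Reference subclass.)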
ysr@888 40
duke@435 41 // List of discovered references.
duke@435 42 class DiscoveredList {
duke@435 43 public:
coleenp@548 44 DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
coleenp@548 45 oop head() const {
coleenp@548 46 return UseCompressedOops ? oopDesc::decode_heap_oop_not_null(_compressed_head) :
coleenp@548 47 _oop_head;
coleenp@548 48 }
coleenp@548 49 HeapWord* adr_head() {
coleenp@548 50 return UseCompressedOops ? (HeapWord*)&_compressed_head :
coleenp@548 51 (HeapWord*)&_oop_head;
coleenp@548 52 }
coleenp@548 53 void set_head(oop o) {
coleenp@548 54 if (UseCompressedOops) {
coleenp@548 55 // Must compress the head ptr.
coleenp@548 56 _compressed_head = oopDesc::encode_heap_oop_not_null(o);
coleenp@548 57 } else {
coleenp@548 58 _oop_head = o;
coleenp@548 59 }
coleenp@548 60 }
coleenp@548 61 bool empty() const { return head() == ReferenceProcessor::sentinel_ref(); }
duke@435 62 size_t length() { return _len; }
ysr@887 63 void set_length(size_t len) { _len = len; }
ysr@887 64 void inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
ysr@887 65 void dec_length(size_t dec) { _len -= dec; }
duke@435 66 private:
coleenp@548 67 // Set value depending on UseCompressedOops. This could be a template class
coleenp@548 68 // but then we would have to fix all the instantiations and declarations that use it.
coleenp@548 69 oop _oop_head;
coleenp@548 70 narrowOop _compressed_head;
duke@435 71 size_t _len;
duke@435 72 };
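// Layout sketch: the discovered lists live in one flat C-heap array of
// _max_num_q * subclasses_of_ref DiscoveredList entries, grouped by subclass:
//
//   _discoveredSoftRefs    [0               .. _max_num_q - 1]
//   _discoveredWeakRefs    [_max_num_q      .. 2*_max_num_q - 1]
//   _discoveredFinalRefs   [2*_max_num_q    .. 3*_max_num_q - 1]
//   _discoveredPhantomRefs [3*_max_num_q    .. 4*_max_num_q - 1]
//
// so queue j of subclass i is _discoveredSoftRefs[i * _max_num_q + j].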
duke@435 73
duke@435 74 void referenceProcessor_init() {
duke@435 75 ReferenceProcessor::init_statics();
duke@435 76 }
duke@435 77
duke@435 78 void ReferenceProcessor::init_statics() {
coleenp@548 79 assert(_sentinelRef == NULL, "should be initialized precisely once");
duke@435 80 EXCEPTION_MARK;
duke@435 81 _sentinelRef = instanceKlass::cast(
never@1577 82 SystemDictionary::Reference_klass())->
coleenp@548 83 allocate_permanent_instance(THREAD);
duke@435 84
duke@435 85 // Initialize the master soft ref clock.
duke@435 86 java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());
duke@435 87
duke@435 88 if (HAS_PENDING_EXCEPTION) {
duke@435 89 Handle ex(THREAD, PENDING_EXCEPTION);
duke@435 90 vm_exit_during_initialization(ex);
duke@435 91 }
duke@435 92 assert(_sentinelRef != NULL && _sentinelRef->is_oop(),
duke@435 93 "Just constructed it!");
ysr@888 94 _always_clear_soft_ref_policy = new AlwaysClearPolicy();
ysr@888 95 _default_soft_ref_policy = new COMPILER2_PRESENT(LRUMaxHeapPolicy())
ysr@888 96 NOT_COMPILER2(LRUCurrentHeapPolicy());
ysr@888 97 if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) {
ysr@888 98 vm_exit_during_initialization("Could not allocate reference policy object");
ysr@888 99 }
duke@435 100 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
duke@435 101 RefDiscoveryPolicy == ReferentBasedDiscovery,
duke@435 102 "Unrecongnized RefDiscoveryPolicy");
duke@435 103 }
duke@435 104
coleenp@548 105 ReferenceProcessor*
coleenp@548 106 ReferenceProcessor::create_ref_processor(MemRegion span,
coleenp@548 107 bool atomic_discovery,
coleenp@548 108 bool mt_discovery,
coleenp@548 109 BoolObjectClosure* is_alive_non_header,
coleenp@548 110 int parallel_gc_threads,
ysr@777 111 bool mt_processing,
ysr@777 112 bool dl_needs_barrier) {
duke@435 113 int mt_degree = 1;
duke@435 114 if (parallel_gc_threads > 1) {
duke@435 115 mt_degree = parallel_gc_threads;
duke@435 116 }
duke@435 117 ReferenceProcessor* rp =
duke@435 118 new ReferenceProcessor(span, atomic_discovery,
duke@435 119 mt_discovery, mt_degree,
ysr@777 120 mt_processing && (parallel_gc_threads > 0),
ysr@777 121 dl_needs_barrier);
duke@435 122 if (rp == NULL) {
duke@435 123 vm_exit_during_initialization("Could not allocate ReferenceProcessor object");
duke@435 124 }
duke@435 125 rp->set_is_alive_non_header(is_alive_non_header);
ysr@892 126 rp->setup_policy(false /* default soft ref policy */);
duke@435 127 return rp;
duke@435 128 }
duke@435 129
duke@435 130 ReferenceProcessor::ReferenceProcessor(MemRegion span,
coleenp@548 131 bool atomic_discovery,
coleenp@548 132 bool mt_discovery,
coleenp@548 133 int mt_degree,
ysr@777 134 bool mt_processing,
ysr@777 135 bool discovered_list_needs_barrier) :
duke@435 136 _discovering_refs(false),
duke@435 137 _enqueuing_is_done(false),
duke@435 138 _is_alive_non_header(NULL),
ysr@777 139 _discovered_list_needs_barrier(discovered_list_needs_barrier),
ysr@777 140 _bs(NULL),
duke@435 141 _processing_is_mt(mt_processing),
duke@435 142 _next_id(0)
duke@435 143 {
duke@435 144 _span = span;
duke@435 145 _discovery_is_atomic = atomic_discovery;
duke@435 146 _discovery_is_mt = mt_discovery;
duke@435 147 _num_q = mt_degree;
jmasa@2188 148 _max_num_q = mt_degree;
jmasa@2188 149 _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref);
duke@435 150 if (_discoveredSoftRefs == NULL) {
duke@435 151 vm_exit_during_initialization("Could not allocate RefProc Array");
duke@435 152 }
jmasa@2188 153 _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q];
jmasa@2188 154 _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_q];
jmasa@2188 155 _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
coleenp@548 156 assert(sentinel_ref() != NULL, "_sentinelRef is NULL");
duke@435 157 // Initialize all entries to _sentinelRef
jmasa@2188 158 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
coleenp@548 159 _discoveredSoftRefs[i].set_head(sentinel_ref());
duke@435 160 _discoveredSoftRefs[i].set_length(0);
duke@435 161 }
ysr@777 162 // If we do barriers, cache a copy of the barrier set.
ysr@777 163 if (discovered_list_needs_barrier) {
ysr@777 164 _bs = Universe::heap()->barrier_set();
ysr@777 165 }
duke@435 166 }
duke@435 167
duke@435 168 #ifndef PRODUCT
duke@435 169 void ReferenceProcessor::verify_no_references_recorded() {
duke@435 170 guarantee(!_discovering_refs, "Discovering refs?");
jmasa@2188 171 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
duke@435 172 guarantee(_discoveredSoftRefs[i].empty(),
duke@435 173 "Found non-empty discovered list");
duke@435 174 }
duke@435 175 }
duke@435 176 #endif
duke@435 177
duke@435 178 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
jmasa@2188 179 // Should this instead be
jmasa@2188 180 // for (int i = 0; i < subclasses_of_ref; i++) {
jmasa@2188 181 // for (int j = 0; j < _num_q; j++) {
jmasa@2188 182 // int index = i * _max_num_q + j;
jmasa@2188 183 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
coleenp@548 184 if (UseCompressedOops) {
coleenp@548 185 f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
coleenp@548 186 } else {
coleenp@548 187 f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
coleenp@548 188 }
duke@435 189 }
duke@435 190 }
duke@435 191
duke@435 192 void ReferenceProcessor::oops_do(OopClosure* f) {
coleenp@548 193 f->do_oop(adr_sentinel_ref());
duke@435 194 }
duke@435 195
coleenp@548 196 void ReferenceProcessor::update_soft_ref_master_clock() {
duke@435 197 // Update (advance) the soft ref master clock field. This must be done
duke@435 198 // after processing the soft ref list.
duke@435 199 jlong now = os::javaTimeMillis();
duke@435 200 jlong clock = java_lang_ref_SoftReference::clock();
duke@435 201 NOT_PRODUCT(
duke@435 202 if (now < clock) {
duke@435 203 warning("time warp: " INT64_FORMAT " to " INT64_FORMAT, clock, now);
duke@435 204 }
duke@435 205 )
duke@435 206 // In product mode, protect ourselves from system time being adjusted
duke@435 207 // externally and going backward; see note in the implementation of
duke@435 208 // GenCollectedHeap::time_since_last_gc() for the right way to fix
duke@435 209 // this uniformly throughout the VM; see bug-id 4741166. XXX
duke@435 210 if (now > clock) {
duke@435 211 java_lang_ref_SoftReference::set_clock(now);
duke@435 212 }
duke@435 213 // Else leave clock stalled at its old value until time progresses
duke@435 214 // past clock value.
duke@435 215 }
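// The clock value set here is what the soft-ref policies read back: a policy
// such as LRUCurrentHeapPolicy (see referencePolicy.cpp) clears a
// SoftReference roughly when
//
//   clock - ref.timestamp > interval_proportional_to_free_heap
//
// so stalling the clock (above) errs on the side of retaining referents.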
duke@435 216
coleenp@548 217 void ReferenceProcessor::process_discovered_references(
duke@435 218 BoolObjectClosure* is_alive,
duke@435 219 OopClosure* keep_alive,
duke@435 220 VoidClosure* complete_gc,
duke@435 221 AbstractRefProcTaskExecutor* task_executor) {
duke@435 222 NOT_PRODUCT(verify_ok_to_handle_reflists());
duke@435 223
duke@435 224 assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
duke@435 225 // Stop treating discovered references specially.
duke@435 226 disable_discovery();
duke@435 227
duke@435 228 bool trace_time = PrintGCDetails && PrintReferenceGC;
duke@435 229 // Soft references
duke@435 230 {
duke@435 231 TraceTime tt("SoftReference", trace_time, false, gclog_or_tty);
ysr@888 232 process_discovered_reflist(_discoveredSoftRefs, _current_soft_ref_policy, true,
duke@435 233 is_alive, keep_alive, complete_gc, task_executor);
duke@435 234 }
duke@435 235
duke@435 236 update_soft_ref_master_clock();
duke@435 237
duke@435 238 // Weak references
duke@435 239 {
duke@435 240 TraceTime tt("WeakReference", trace_time, false, gclog_or_tty);
duke@435 241 process_discovered_reflist(_discoveredWeakRefs, NULL, true,
duke@435 242 is_alive, keep_alive, complete_gc, task_executor);
duke@435 243 }
duke@435 244
duke@435 245 // Final references
duke@435 246 {
duke@435 247 TraceTime tt("FinalReference", trace_time, false, gclog_or_tty);
duke@435 248 process_discovered_reflist(_discoveredFinalRefs, NULL, false,
duke@435 249 is_alive, keep_alive, complete_gc, task_executor);
duke@435 250 }
duke@435 251
duke@435 252 // Phantom references
duke@435 253 {
duke@435 254 TraceTime tt("PhantomReference", trace_time, false, gclog_or_tty);
duke@435 255 process_discovered_reflist(_discoveredPhantomRefs, NULL, false,
duke@435 256 is_alive, keep_alive, complete_gc, task_executor);
duke@435 257 }
duke@435 258
duke@435 259 // Weak global JNI references. It would make more sense (semantically) to
duke@435 260 // traverse these simultaneously with the regular weak references above, but
duke@435 261 // that is not how the JDK 1.2 specification requires it. See #4126360. Native code can
duke@435 262 // thus use JNI weak references to circumvent the phantom references and
duke@435 263 // resurrect a "post-mortem" object.
duke@435 264 {
duke@435 265 TraceTime tt("JNI Weak Reference", trace_time, false, gclog_or_tty);
duke@435 266 if (task_executor != NULL) {
duke@435 267 task_executor->set_single_threaded_mode();
duke@435 268 }
duke@435 269 process_phaseJNI(is_alive, keep_alive, complete_gc);
duke@435 270 }
duke@435 271 }
duke@435 272
duke@435 273 #ifndef PRODUCT
duke@435 274 // Calculate the number of jni handles.
coleenp@548 275 uint ReferenceProcessor::count_jni_refs() {
duke@435 276 class AlwaysAliveClosure: public BoolObjectClosure {
duke@435 277 public:
coleenp@548 278 virtual bool do_object_b(oop obj) { return true; }
coleenp@548 279 virtual void do_object(oop obj) { assert(false, "Don't call"); }
duke@435 280 };
duke@435 281
duke@435 282 class CountHandleClosure: public OopClosure {
duke@435 283 private:
duke@435 284 int _count;
duke@435 285 public:
duke@435 286 CountHandleClosure(): _count(0) {}
coleenp@548 287 void do_oop(oop* unused) { _count++; }
coleenp@548 288 void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
duke@435 289 int count() { return _count; }
duke@435 290 };
duke@435 291 CountHandleClosure global_handle_count;
duke@435 292 AlwaysAliveClosure always_alive;
duke@435 293 JNIHandles::weak_oops_do(&always_alive, &global_handle_count);
duke@435 294 return global_handle_count.count();
duke@435 295 }
duke@435 296 #endif
duke@435 297
duke@435 298 void ReferenceProcessor::process_phaseJNI(BoolObjectClosure* is_alive,
duke@435 299 OopClosure* keep_alive,
duke@435 300 VoidClosure* complete_gc) {
duke@435 301 #ifndef PRODUCT
duke@435 302 if (PrintGCDetails && PrintReferenceGC) {
duke@435 303 unsigned int count = count_jni_refs();
duke@435 304 gclog_or_tty->print(", %u refs", count);
duke@435 305 }
duke@435 306 #endif
duke@435 307 JNIHandles::weak_oops_do(is_alive, keep_alive);
duke@435 308 // Finally remember to keep sentinel around
coleenp@548 309 keep_alive->do_oop(adr_sentinel_ref());
duke@435 310 complete_gc->do_void();
duke@435 311 }
duke@435 312
coleenp@548 313
coleenp@548 314 template <class T>
phh@1558 315 bool enqueue_discovered_ref_helper(ReferenceProcessor* ref,
phh@1558 316 AbstractRefProcTaskExecutor* task_executor) {
coleenp@548 317
duke@435 318 // Remember old value of pending references list
coleenp@548 319 T* pending_list_addr = (T*)java_lang_ref_Reference::pending_list_addr();
coleenp@548 320 T old_pending_list_value = *pending_list_addr;
duke@435 321
duke@435 322 // Enqueue references that are not made active again, and
duke@435 323 // clear the decks for the next collection (cycle).
coleenp@548 324 ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor);
duke@435 325 // Do the oop-check on pending_list_addr missed in
duke@435 326 // enqueue_discovered_reflist. We should probably
duke@435 327 // do a raw oop_check so that future such idempotent
duke@435 328 // oop_stores relying on the oop-check side-effect
duke@435 329 // may be elided automatically and safely without
duke@435 330 // affecting correctness.
coleenp@548 331 oop_store(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
duke@435 332
duke@435 333 // Stop treating discovered references specially.
coleenp@548 334 ref->disable_discovery();
duke@435 335
duke@435 336 // Return true if new pending references were added
duke@435 337 return old_pending_list_value != *pending_list_addr;
duke@435 338 }
duke@435 339
coleenp@548 340 bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
coleenp@548 341 NOT_PRODUCT(verify_ok_to_handle_reflists());
coleenp@548 342 if (UseCompressedOops) {
coleenp@548 343 return enqueue_discovered_ref_helper<narrowOop>(this, task_executor);
coleenp@548 344 } else {
coleenp@548 345 return enqueue_discovered_ref_helper<oop>(this, task_executor);
coleenp@548 346 }
coleenp@548 347 }
coleenp@548 348
duke@435 349 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
coleenp@548 350 HeapWord* pending_list_addr) {
duke@435 351 // Given a list of refs linked through the "discovered" field
duke@435 352 // (java.lang.ref.Reference.discovered) chain them through the
duke@435 353 // "next" field (java.lang.ref.Reference.next) and prepend
duke@435 354 // to the pending list.
duke@435 355 if (TraceReferenceGC && PrintGCDetails) {
duke@435 356 gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
duke@435 357 INTPTR_FORMAT, (address)refs_list.head());
duke@435 358 }
duke@435 359 oop obj = refs_list.head();
duke@435 360 // Walk down the list, copying the discovered field into
duke@435 361 // the next field and clearing it (except for the last
duke@435 362 // non-sentinel object which is treated specially to avoid
duke@435 363 // confusion with an active reference).
coleenp@548 364 while (obj != sentinel_ref()) {
duke@435 365 assert(obj->is_instanceRef(), "should be reference object");
duke@435 366 oop next = java_lang_ref_Reference::discovered(obj);
duke@435 367 if (TraceReferenceGC && PrintGCDetails) {
coleenp@548 368 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next " INTPTR_FORMAT,
coleenp@548 369 obj, next);
duke@435 370 }
coleenp@548 371 assert(java_lang_ref_Reference::next(obj) == NULL,
coleenp@548 372 "The reference should not be enqueued");
coleenp@548 373 if (next == sentinel_ref()) { // obj is last
duke@435 374 // Swap refs_list into pending_list_addr and
duke@435 375 // set obj's next to what we read from pending_list_addr.
coleenp@548 376 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
duke@435 377 // Need oop_check on pending_list_addr above;
duke@435 378 // see special oop-check code at the end of
duke@435 379 // enqueue_discovered_reflists() further below.
duke@435 380 if (old == NULL) {
duke@435 381 // obj should be made to point to itself, since
duke@435 382 // pending list was empty.
duke@435 383 java_lang_ref_Reference::set_next(obj, obj);
duke@435 384 } else {
duke@435 385 java_lang_ref_Reference::set_next(obj, old);
duke@435 386 }
duke@435 387 } else {
duke@435 388 java_lang_ref_Reference::set_next(obj, next);
duke@435 389 }
duke@435 390 java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
duke@435 391 obj = next;
duke@435 392 }
duke@435 393 }
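// Splicing sketch for the loop above, given discovered list A -> B -> sentinel
// and pending list P (possibly empty):
//
//   before: A.discovered == B,  B.discovered == sentinel
//   after:  A.next == B,        B.next == P   (or B.next == B if P was empty)
//           pending_list == A,  both discovered fields == NULL
//
// A Reference whose next field points to itself thus marks the end of the
// pending list, matching the empty-list case handled above.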
duke@435 394
duke@435 395 // Parallel enqueue task
duke@435 396 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
duke@435 397 public:
duke@435 398 RefProcEnqueueTask(ReferenceProcessor& ref_processor,
duke@435 399 DiscoveredList discovered_refs[],
coleenp@548 400 HeapWord* pending_list_addr,
duke@435 401 oop sentinel_ref,
duke@435 402 int n_queues)
duke@435 403 : EnqueueTask(ref_processor, discovered_refs,
duke@435 404 pending_list_addr, sentinel_ref, n_queues)
duke@435 405 { }
duke@435 406
coleenp@548 407 virtual void work(unsigned int work_id) {
duke@435 408 assert(work_id < (unsigned int)_ref_processor.num_q(), "Index out-of-bounds");
duke@435 409 // Simplest first cut: static partitioning.
duke@435 410 int index = work_id;
jmasa@2188 411 // The increment on "index" must correspond to the maximum number of queues
jmasa@2188 412 // (n_queues) with which that ReferenceProcessor was created. That
jmasa@2188 413 // is because of the "clever" way the discovered references lists were
jmasa@2188 414 // allocated and are indexed into. That number is ParallelGCThreads
jmasa@2188 415 // currently. Assert that.
jmasa@2188 416 assert(_n_queues == (int) ParallelGCThreads, "Different number not expected");
jmasa@2188 417 for (int j = 0;
jmasa@2188 418 j < subclasses_of_ref;
jmasa@2188 419 j++, index += _n_queues) {
duke@435 420 _ref_processor.enqueue_discovered_reflist(
duke@435 421 _refs_lists[index], _pending_list_addr);
duke@435 422 _refs_lists[index].set_head(_sentinel_ref);
duke@435 423 _refs_lists[index].set_length(0);
duke@435 424 }
duke@435 425 }
duke@435 426 };
duke@435 427
duke@435 428 // Enqueue references that are not made active again
coleenp@548 429 void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
duke@435 430 AbstractRefProcTaskExecutor* task_executor) {
duke@435 431 if (_processing_is_mt && task_executor != NULL) {
duke@435 432 // Parallel code
duke@435 433 RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
jmasa@2188 434 pending_list_addr, sentinel_ref(), _max_num_q);
duke@435 435 task_executor->execute(tsk);
duke@435 436 } else {
duke@435 437 // Serial code: enqueue each discovered list in turn
jmasa@2188 438 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
duke@435 439 enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
coleenp@548 440 _discoveredSoftRefs[i].set_head(sentinel_ref());
duke@435 441 _discoveredSoftRefs[i].set_length(0);
duke@435 442 }
duke@435 443 }
duke@435 444 }
duke@435 445
duke@435 446 // Iterator for the list of discovered references.
duke@435 447 class DiscoveredListIterator {
duke@435 448 public:
duke@435 449 inline DiscoveredListIterator(DiscoveredList& refs_list,
duke@435 450 OopClosure* keep_alive,
duke@435 451 BoolObjectClosure* is_alive);
duke@435 452
duke@435 453 // End Of List.
coleenp@548 454 inline bool has_next() const { return _next != ReferenceProcessor::sentinel_ref(); }
duke@435 455
duke@435 456 // Get oop to the Reference object.
coleenp@548 457 inline oop obj() const { return _ref; }
duke@435 458
duke@435 459 // Get oop to the referent object.
coleenp@548 460 inline oop referent() const { return _referent; }
duke@435 461
duke@435 462 // Returns true if referent is alive.
duke@435 463 inline bool is_referent_alive() const;
duke@435 464
duke@435 465 // Loads data for the current reference.
duke@435 466 // The "allow_null_referent" argument tells us to allow for the possibility
duke@435 467 // of a NULL referent in the discovered Reference object. This typically
duke@435 468 // happens in the case of concurrent collectors that may have done the
ysr@887 469 // discovery concurrently, or interleaved, with mutator execution.
duke@435 470 inline void load_ptrs(DEBUG_ONLY(bool allow_null_referent));
duke@435 471
duke@435 472 // Move to the next discovered reference.
duke@435 473 inline void next();
duke@435 474
ysr@887 475 // Remove the current reference from the list
duke@435 476 inline void remove();
duke@435 477
duke@435 478 // Make the Reference object active again.
duke@435 479 inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }
duke@435 480
duke@435 481 // Make the referent alive.
coleenp@548 482 inline void make_referent_alive() {
coleenp@548 483 if (UseCompressedOops) {
coleenp@548 484 _keep_alive->do_oop((narrowOop*)_referent_addr);
coleenp@548 485 } else {
coleenp@548 486 _keep_alive->do_oop((oop*)_referent_addr);
coleenp@548 487 }
coleenp@548 488 }
duke@435 489
duke@435 490 // Update the discovered field.
coleenp@548 491 inline void update_discovered() {
coleenp@548 492 // First _prev_next ref actually points into DiscoveredList (gross).
coleenp@548 493 if (UseCompressedOops) {
coleenp@548 494 _keep_alive->do_oop((narrowOop*)_prev_next);
coleenp@548 495 } else {
coleenp@548 496 _keep_alive->do_oop((oop*)_prev_next);
coleenp@548 497 }
coleenp@548 498 }
duke@435 499
duke@435 500 // NULL out referent pointer.
coleenp@548 501 inline void clear_referent() { oop_store_raw(_referent_addr, NULL); }
duke@435 502
duke@435 503 // Statistics
duke@435 504 NOT_PRODUCT(
duke@435 505 inline size_t processed() const { return _processed; }
duke@435 506 inline size_t removed() const { return _removed; }
duke@435 507 )
duke@435 508
duke@435 509 inline void move_to_next();
duke@435 510
duke@435 511 private:
duke@435 512 DiscoveredList& _refs_list;
coleenp@548 513 HeapWord* _prev_next;
duke@435 514 oop _ref;
coleenp@548 515 HeapWord* _discovered_addr;
duke@435 516 oop _next;
coleenp@548 517 HeapWord* _referent_addr;
duke@435 518 oop _referent;
duke@435 519 OopClosure* _keep_alive;
duke@435 520 BoolObjectClosure* _is_alive;
duke@435 521 DEBUG_ONLY(
duke@435 522 oop _first_seen; // cyclic linked list check
duke@435 523 )
duke@435 524 NOT_PRODUCT(
duke@435 525 size_t _processed;
duke@435 526 size_t _removed;
duke@435 527 )
duke@435 528 };
duke@435 529
duke@435 530 inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList& refs_list,
duke@435 531 OopClosure* keep_alive,
duke@435 532 BoolObjectClosure* is_alive)
duke@435 533 : _refs_list(refs_list),
coleenp@548 534 _prev_next(refs_list.adr_head()),
duke@435 535 _ref(refs_list.head()),
duke@435 536 #ifdef ASSERT
duke@435 537 _first_seen(refs_list.head()),
duke@435 538 #endif
duke@435 539 #ifndef PRODUCT
duke@435 540 _processed(0),
duke@435 541 _removed(0),
duke@435 542 #endif
duke@435 543 _next(refs_list.head()),
duke@435 544 _keep_alive(keep_alive),
duke@435 545 _is_alive(is_alive)
duke@435 546 { }
duke@435 547
coleenp@548 548 inline bool DiscoveredListIterator::is_referent_alive() const {
duke@435 549 return _is_alive->do_object_b(_referent);
duke@435 550 }
duke@435 551
coleenp@548 552 inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
duke@435 553 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
coleenp@548 554 oop discovered = java_lang_ref_Reference::discovered(_ref);
coleenp@548 555 assert(_discovered_addr && discovered->is_oop_or_null(),
duke@435 556 "discovered field is bad");
coleenp@548 557 _next = discovered;
duke@435 558 _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
coleenp@548 559 _referent = java_lang_ref_Reference::referent(_ref);
duke@435 560 assert(Universe::heap()->is_in_reserved_or_null(_referent),
duke@435 561 "Wrong oop found in java.lang.Reference object");
duke@435 562 assert(allow_null_referent ?
duke@435 563 _referent->is_oop_or_null()
duke@435 564 : _referent->is_oop(),
duke@435 565 "bad referent");
duke@435 566 }
duke@435 567
coleenp@548 568 inline void DiscoveredListIterator::next() {
duke@435 569 _prev_next = _discovered_addr;
duke@435 570 move_to_next();
duke@435 571 }
duke@435 572
coleenp@548 573 inline void DiscoveredListIterator::remove() {
duke@435 574 assert(_ref->is_oop(), "Dropping a bad reference");
coleenp@548 575 oop_store_raw(_discovered_addr, NULL);
coleenp@548 576 // First _prev_next ref actually points into DiscoveredList (gross).
coleenp@548 577 if (UseCompressedOops) {
coleenp@548 578 // Remove Reference object from list.
coleenp@548 579 oopDesc::encode_store_heap_oop_not_null((narrowOop*)_prev_next, _next);
coleenp@548 580 } else {
coleenp@548 581 // Remove Reference object from list.
coleenp@548 582 oopDesc::store_heap_oop((oop*)_prev_next, _next);
coleenp@548 583 }
duke@435 584 NOT_PRODUCT(_removed++);
ysr@887 585 _refs_list.dec_length(1);
duke@435 586 }
duke@435 587
coleenp@548 588 inline void DiscoveredListIterator::move_to_next() {
duke@435 589 _ref = _next;
duke@435 590 assert(_ref != _first_seen, "cyclic ref_list found");
duke@435 591 NOT_PRODUCT(_processed++);
duke@435 592 }
duke@435 593
duke@435 594 // NOTE: process_phase*() are largely similar, and at a high level
duke@435 595 // merely iterate over the extant list applying a predicate to
duke@435 596 // each of its elements and possibly removing that element from the
duke@435 597 // list and applying some further closures to that element.
duke@435 598 // We should consider the possibility of replacing these
duke@435 599 // process_phase*() methods by abstracting them into
duke@435 600 // a single general iterator invocation that receives appropriate
duke@435 601 // closures that accomplish this work.
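// Such an abstraction might look like the following sketch (hypothetical,
// not part of this file): a single traversal parameterized by a drop
// predicate and a drop action, applied via function objects:
//
//   template <typename ShouldDrop, typename OnDrop>
//   void iterate_and_filter(DiscoveredList& refs_list,
//                           BoolObjectClosure* is_alive,
//                           OopClosure* keep_alive,
//                           ShouldDrop should_drop, OnDrop on_drop) {
//     DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
//     while (iter.has_next()) {
//       iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
//       if (should_drop(iter)) {
//         iter.remove();        // unlink from the discovered list
//         on_drop(iter);        // e.g. make_active(), make_referent_alive()
//         iter.move_to_next();
//       } else {
//         iter.next();
//       }
//     }
//   }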
duke@435 602
duke@435 603 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
duke@435 604 // referents are not alive, but that should be kept alive for policy reasons.
duke@435 605 // Keep alive the transitive closure of all such referents.
duke@435 606 void
coleenp@548 607 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
duke@435 608 ReferencePolicy* policy,
duke@435 609 BoolObjectClosure* is_alive,
duke@435 610 OopClosure* keep_alive,
duke@435 611 VoidClosure* complete_gc) {
duke@435 612 assert(policy != NULL, "Must have a non-NULL policy");
coleenp@548 613 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 614 // Decide which softly reachable refs should be kept alive.
duke@435 615 while (iter.has_next()) {
duke@435 616 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
duke@435 617 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
duke@435 618 if (referent_is_dead && !policy->should_clear_reference(iter.obj())) {
duke@435 619 if (TraceReferenceGC) {
duke@435 620 gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
coleenp@548 621 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 622 }
ysr@887 623 // Remove Reference object from list
ysr@887 624 iter.remove();
duke@435 625 // Make the Reference object active again
duke@435 626 iter.make_active();
duke@435 627 // keep the referent around
duke@435 628 iter.make_referent_alive();
ysr@887 629 iter.move_to_next();
duke@435 630 } else {
duke@435 631 iter.next();
duke@435 632 }
duke@435 633 }
duke@435 634 // Close the reachable set
duke@435 635 complete_gc->do_void();
duke@435 636 NOT_PRODUCT(
duke@435 637 if (PrintGCDetails && TraceReferenceGC) {
jmasa@2188 638 gclog_or_tty->print_cr(" Dropped " SIZE_FORMAT " dead Refs out of " SIZE_FORMAT
jmasa@2188 639 " discovered Refs by policy, list " INTPTR_FORMAT,
jmasa@2188 640 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 641 }
duke@435 642 )
duke@435 643 }
duke@435 644
duke@435 645 // Traverse the list and remove any Refs that are not active, or
duke@435 646 // whose referents are either alive or NULL.
duke@435 647 void
coleenp@548 648 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
duke@435 649 BoolObjectClosure* is_alive,
coleenp@548 650 OopClosure* keep_alive) {
duke@435 651 assert(discovery_is_atomic(), "Error");
coleenp@548 652 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 653 while (iter.has_next()) {
duke@435 654 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
coleenp@548 655 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
coleenp@548 656 assert(next == NULL, "Should not discover inactive Reference");
duke@435 657 if (iter.is_referent_alive()) {
duke@435 658 if (TraceReferenceGC) {
duke@435 659 gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
coleenp@548 660 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 661 }
duke@435 662 // The referent is reachable after all.
ysr@887 663 // Remove Reference object from list.
ysr@887 664 iter.remove();
duke@435 665 // Update the referent pointer as necessary: Note that this
duke@435 666 // should not entail any recursive marking because the
duke@435 667 // referent must already have been traversed.
duke@435 668 iter.make_referent_alive();
ysr@887 669 iter.move_to_next();
duke@435 670 } else {
duke@435 671 iter.next();
duke@435 672 }
duke@435 673 }
duke@435 674 NOT_PRODUCT(
duke@435 675 if (PrintGCDetails && TraceReferenceGC) {
jmasa@2188 676 gclog_or_tty->print_cr(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
jmasa@2188 677 " Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 678 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 679 }
duke@435 680 )
duke@435 681 }
duke@435 682
duke@435 683 void
coleenp@548 684 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
coleenp@548 685 BoolObjectClosure* is_alive,
coleenp@548 686 OopClosure* keep_alive,
coleenp@548 687 VoidClosure* complete_gc) {
duke@435 688 assert(!discovery_is_atomic(), "Error");
coleenp@548 689 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 690 while (iter.has_next()) {
duke@435 691 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 692 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
coleenp@548 693 oop next = java_lang_ref_Reference::next(iter.obj());
duke@435 694 if ((iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@548 695 next != NULL)) {
coleenp@548 696 assert(next->is_oop_or_null(), "bad next field");
duke@435 697 // Remove Reference object from list
duke@435 698 iter.remove();
duke@435 699 // Trace the cohorts
duke@435 700 iter.make_referent_alive();
coleenp@548 701 if (UseCompressedOops) {
coleenp@548 702 keep_alive->do_oop((narrowOop*)next_addr);
coleenp@548 703 } else {
coleenp@548 704 keep_alive->do_oop((oop*)next_addr);
coleenp@548 705 }
ysr@887 706 iter.move_to_next();
duke@435 707 } else {
duke@435 708 iter.next();
duke@435 709 }
duke@435 710 }
duke@435 711 // Now close the newly reachable set
duke@435 712 complete_gc->do_void();
duke@435 713 NOT_PRODUCT(
duke@435 714 if (PrintGCDetails && TraceReferenceGC) {
jmasa@2188 715 gclog_or_tty->print_cr(" Dropped " SIZE_FORMAT " active Refs out of " SIZE_FORMAT
jmasa@2188 716 " Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 717 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 718 }
duke@435 719 )
duke@435 720 }
duke@435 721
duke@435 722 // Traverse the list and process the referents, by either
coleenp@548 723 // clearing them or keeping them (and the transitive closure
duke@435 724 // of objects reachable from them) alive.
duke@435 725 void
coleenp@548 726 ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
duke@435 727 bool clear_referent,
duke@435 728 BoolObjectClosure* is_alive,
duke@435 729 OopClosure* keep_alive,
duke@435 730 VoidClosure* complete_gc) {
jmasa@2188 731 ResourceMark rm;
coleenp@548 732 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 733 while (iter.has_next()) {
duke@435 734 iter.update_discovered();
duke@435 735 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
duke@435 736 if (clear_referent) {
duke@435 737 // NULL out referent pointer
duke@435 738 iter.clear_referent();
duke@435 739 } else {
duke@435 740 // keep the referent around
duke@435 741 iter.make_referent_alive();
duke@435 742 }
duke@435 743 if (TraceReferenceGC) {
duke@435 744 gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
duke@435 745 clear_referent ? "cleared " : "",
coleenp@548 746 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 747 }
duke@435 748 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
duke@435 749 iter.next();
duke@435 750 }
duke@435 751 // Remember to keep sentinel pointer around
duke@435 752 iter.update_discovered();
duke@435 753 // Close the reachable set
duke@435 754 complete_gc->do_void();
duke@435 755 }
duke@435 756
duke@435 757 void
coleenp@548 758 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
coleenp@548 759 oop obj = refs_list.head();
coleenp@548 760 while (obj != sentinel_ref()) {
coleenp@548 761 oop discovered = java_lang_ref_Reference::discovered(obj);
coleenp@548 762 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
coleenp@548 763 obj = discovered;
duke@435 764 }
coleenp@548 765 refs_list.set_head(sentinel_ref());
coleenp@548 766 refs_list.set_length(0);
duke@435 767 }
duke@435 768
ysr@777 769 void ReferenceProcessor::abandon_partial_discovery() {
ysr@777 770 // loop over the lists
jmasa@2188 771 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
jmasa@2188 772 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
johnc@2316 773 gclog_or_tty->print_cr("\nAbandoning %s discovered list",
johnc@2316 774 list_name(i));
ysr@777 775 }
ysr@777 776 abandon_partial_discovered_list(_discoveredSoftRefs[i]);
duke@435 777 }
duke@435 778 }
duke@435 779
duke@435 780 class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 781 public:
duke@435 782 RefProcPhase1Task(ReferenceProcessor& ref_processor,
duke@435 783 DiscoveredList refs_lists[],
duke@435 784 ReferencePolicy* policy,
duke@435 785 bool marks_oops_alive)
duke@435 786 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
duke@435 787 _policy(policy)
duke@435 788 { }
duke@435 789 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 790 OopClosure& keep_alive,
duke@435 791 VoidClosure& complete_gc)
duke@435 792 {
jmasa@2188 793 Thread* thr = Thread::current();
jmasa@2188 794 int refs_list_index = ((WorkerThread*)thr)->id();
jmasa@2188 795 _ref_processor.process_phase1(_refs_lists[refs_list_index], _policy,
duke@435 796 &is_alive, &keep_alive, &complete_gc);
duke@435 797 }
duke@435 798 private:
duke@435 799 ReferencePolicy* _policy;
duke@435 800 };
duke@435 801
duke@435 802 class RefProcPhase2Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 803 public:
duke@435 804 RefProcPhase2Task(ReferenceProcessor& ref_processor,
duke@435 805 DiscoveredList refs_lists[],
duke@435 806 bool marks_oops_alive)
duke@435 807 : ProcessTask(ref_processor, refs_lists, marks_oops_alive)
duke@435 808 { }
duke@435 809 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 810 OopClosure& keep_alive,
duke@435 811 VoidClosure& complete_gc)
duke@435 812 {
duke@435 813 _ref_processor.process_phase2(_refs_lists[i],
duke@435 814 &is_alive, &keep_alive, &complete_gc);
duke@435 815 }
duke@435 816 };
duke@435 817
duke@435 818 class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 819 public:
duke@435 820 RefProcPhase3Task(ReferenceProcessor& ref_processor,
duke@435 821 DiscoveredList refs_lists[],
duke@435 822 bool clear_referent,
duke@435 823 bool marks_oops_alive)
duke@435 824 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
duke@435 825 _clear_referent(clear_referent)
duke@435 826 { }
duke@435 827 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 828 OopClosure& keep_alive,
duke@435 829 VoidClosure& complete_gc)
duke@435 830 {
jmasa@2188 831 // Don't use "refs_list_index" calculated in this way because
jmasa@2188 832 // balance_queues() has moved the Ref's into the first n queues.
jmasa@2188 833 // Thread* thr = Thread::current();
jmasa@2188 834 // int refs_list_index = ((WorkerThread*)thr)->id();
jmasa@2188 835 // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
duke@435 836 _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
duke@435 837 &is_alive, &keep_alive, &complete_gc);
duke@435 838 }
duke@435 839 private:
duke@435 840 bool _clear_referent;
duke@435 841 };
duke@435 842
duke@435 843 // Balances reference queues.
jmasa@2188 844 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
jmasa@2188 845 // queues[0, 1, ..., _num_q-1] because only the first _num_q
jmasa@2188 846 // corresponding to the active workers will be processed.
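// Worked example: _max_num_q == 4, _num_q == 2, queue lengths {10, 0, 2, 8}.
// total_refs == 20, so avg_refs == 20/2 + 1 == 11. Queues 2 and 3 are beyond
// _num_q and must be fully drained: queue 2 moves 1 ref to queue 0 (filling
// it to avg_refs) and 1 ref to queue 1; queue 3 then moves all 8 refs to
// queue 1. Final lengths: {11, 9, 0, 0}.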
duke@435 847 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
duke@435 848 {
duke@435 849 // calculate total length
duke@435 850 size_t total_refs = 0;
jmasa@2188 851 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 852 gclog_or_tty->print_cr("\nBalance ref_lists ");
jmasa@2188 853 }
jmasa@2188 854
jmasa@2188 855 for (int i = 0; i < _max_num_q; ++i) {
duke@435 856 total_refs += ref_lists[i].length();
jmasa@2188 857 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 858 gclog_or_tty->print(SIZE_FORMAT " ", ref_lists[i].length());
jmasa@2188 859 }
jmasa@2188 860 }
jmasa@2188 861 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 862 gclog_or_tty->print_cr(" = " SIZE_FORMAT, total_refs);
duke@435 863 }
duke@435 864 size_t avg_refs = total_refs / _num_q + 1;
duke@435 865 int to_idx = 0;
jmasa@2188 866 for (int from_idx = 0; from_idx < _max_num_q; from_idx++) {
jmasa@2188 867 bool move_all = false;
jmasa@2188 868 if (from_idx >= _num_q) {
jmasa@2188 869 move_all = ref_lists[from_idx].length() > 0;
jmasa@2188 870 }
jmasa@2188 871 while ((ref_lists[from_idx].length() > avg_refs) ||
jmasa@2188 872 move_all) {
duke@435 873 assert(to_idx < _num_q, "Sanity Check!");
duke@435 874 if (ref_lists[to_idx].length() < avg_refs) {
duke@435 875 // move superfluous refs
jmasa@2188 876 size_t refs_to_move;
jmasa@2188 877 // Move all the Ref's if the from queue will not be processed.
jmasa@2188 878 if (move_all) {
jmasa@2188 879 refs_to_move = MIN2(ref_lists[from_idx].length(),
jmasa@2188 880 avg_refs - ref_lists[to_idx].length());
jmasa@2188 881 } else {
jmasa@2188 882 refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
jmasa@2188 883 avg_refs - ref_lists[to_idx].length());
jmasa@2188 884 }
duke@435 885 oop move_head = ref_lists[from_idx].head();
duke@435 886 oop move_tail = move_head;
duke@435 887 oop new_head = move_head;
duke@435 888 // find an element to split the list on
duke@435 889 for (size_t j = 0; j < refs_to_move; ++j) {
duke@435 890 move_tail = new_head;
coleenp@548 891 new_head = java_lang_ref_Reference::discovered(new_head);
duke@435 892 }
duke@435 893 java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
duke@435 894 ref_lists[to_idx].set_head(move_head);
ysr@887 895 ref_lists[to_idx].inc_length(refs_to_move);
duke@435 896 ref_lists[from_idx].set_head(new_head);
ysr@887 897 ref_lists[from_idx].dec_length(refs_to_move);
jmasa@2188 898 if (ref_lists[from_idx].length() == 0) {
jmasa@2188 899 break;
jmasa@2188 900 }
duke@435 901 } else {
jmasa@2188 902 to_idx = (to_idx + 1) % _num_q;
duke@435 903 }
duke@435 904 }
duke@435 905 }
jmasa@2188 906 #ifdef ASSERT
jmasa@2188 907 size_t balanced_total_refs = 0;
jmasa@2188 908 for (int i = 0; i < _max_num_q; ++i) {
jmasa@2188 909 balanced_total_refs += ref_lists[i].length();
jmasa@2188 910 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 911 gclog_or_tty->print(SIZE_FORMAT " ", ref_lists[i].length());
jmasa@2188 912 }
jmasa@2188 913 }
jmasa@2188 914 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 915 gclog_or_tty->print_cr(" = " SIZE_FORMAT, balanced_total_refs);
jmasa@2188 916 gclog_or_tty->flush();
jmasa@2188 917 }
jmasa@2188 918 assert(total_refs == balanced_total_refs, "Balancing was incomplete");
jmasa@2188 919 #endif
jmasa@2188 920 }
jmasa@2188 921
jmasa@2188 922 void ReferenceProcessor::balance_all_queues() {
jmasa@2188 923 balance_queues(_discoveredSoftRefs);
jmasa@2188 924 balance_queues(_discoveredWeakRefs);
jmasa@2188 925 balance_queues(_discoveredFinalRefs);
jmasa@2188 926 balance_queues(_discoveredPhantomRefs);
duke@435 927 }
duke@435 928
duke@435 929 void
duke@435 930 ReferenceProcessor::process_discovered_reflist(
duke@435 931 DiscoveredList refs_lists[],
duke@435 932 ReferencePolicy* policy,
duke@435 933 bool clear_referent,
duke@435 934 BoolObjectClosure* is_alive,
duke@435 935 OopClosure* keep_alive,
duke@435 936 VoidClosure* complete_gc,
duke@435 937 AbstractRefProcTaskExecutor* task_executor)
duke@435 938 {
jmasa@2188 939 bool mt_processing = task_executor != NULL && _processing_is_mt;
jmasa@2188 940 // If discovery used MT and a dynamic number of GC threads, then
jmasa@2188 941 // the queues must be balanced for correctness if fewer than the
jmasa@2188 942 // maximum number of queues were used. The number of queues used
jmasa@2188 943 // during discovery may differ from the number to be used for
jmasa@2188 944 // processing, so don't depend on _num_q < _max_num_q as part
jmasa@2188 945 // of the test.
jmasa@2188 946 bool must_balance = _discovery_is_mt;
jmasa@2188 947
jmasa@2188 948 if ((mt_processing && ParallelRefProcBalancingEnabled) ||
jmasa@2188 949 must_balance) {
duke@435 950 balance_queues(refs_lists);
duke@435 951 }
duke@435 952 if (PrintReferenceGC && PrintGCDetails) {
duke@435 953 size_t total = 0;
duke@435 954 for (int i = 0; i < _num_q; ++i) {
duke@435 955 total += refs_lists[i].length();
duke@435 956 }
duke@435 957 gclog_or_tty->print(", " SIZE_FORMAT " refs", total);
duke@435 958 }
duke@435 959
duke@435 960 // Phase 1 (soft refs only):
duke@435 961 // . Traverse the list and remove any SoftReferences whose
duke@435 962 // referents are not alive, but that should be kept alive for
duke@435 963 // policy reasons. Keep alive the transitive closure of all
duke@435 964 // such referents.
duke@435 965 if (policy != NULL) {
jmasa@2188 966 if (mt_processing) {
duke@435 967 RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/);
duke@435 968 task_executor->execute(phase1);
duke@435 969 } else {
duke@435 970 for (int i = 0; i < _num_q; i++) {
duke@435 971 process_phase1(refs_lists[i], policy,
duke@435 972 is_alive, keep_alive, complete_gc);
duke@435 973 }
duke@435 974 }
duke@435 975 } else { // policy == NULL
duke@435 976 assert(refs_lists != _discoveredSoftRefs,
duke@435 977 "Policy must be specified for soft references.");
duke@435 978 }
duke@435 979
duke@435 980 // Phase 2:
duke@435 981 // . Traverse the list and remove any refs whose referents are alive.
jmasa@2188 982 if (mt_processing) {
duke@435 983 RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/);
duke@435 984 task_executor->execute(phase2);
duke@435 985 } else {
duke@435 986 for (int i = 0; i < _num_q; i++) {
duke@435 987 process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
duke@435 988 }
duke@435 989 }
duke@435 990
duke@435 991 // Phase 3:
duke@435 992 // . Traverse the list and process referents as appropriate.
jmasa@2188 993 if (mt_processing) {
duke@435 994 RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/);
duke@435 995 task_executor->execute(phase3);
duke@435 996 } else {
duke@435 997 for (int i = 0; i < _num_q; i++) {
duke@435 998 process_phase3(refs_lists[i], clear_referent,
duke@435 999 is_alive, keep_alive, complete_gc);
duke@435 1000 }
duke@435 1001 }
duke@435 1002 }
duke@435 1003
duke@435 1004 void ReferenceProcessor::clean_up_discovered_references() {
duke@435 1005 // loop over the lists
jmasa@2188 1006 // Should this instead be
jmasa@2188 1007 // for (int i = 0; i < subclasses_of_ref; i++) {
jmasa@2188 1008 // for (int j = 0; j < _num_q; j++) {
jmasa@2188 1009 // int index = i * _max_num_q + j;
jmasa@2188 1010 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
duke@435 1011 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
duke@435 1012 gclog_or_tty->print_cr(
duke@435 1013 "\nScrubbing %s discovered list of Null referents",
duke@435 1014 list_name(i));
duke@435 1015 }
duke@435 1016 clean_up_discovered_reflist(_discoveredSoftRefs[i]);
duke@435 1017 }
duke@435 1018 }
duke@435 1019
duke@435 1020 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
duke@435 1021 assert(!discovery_is_atomic(), "Else why call this method?");
duke@435 1022 DiscoveredListIterator iter(refs_list, NULL, NULL);
duke@435 1023 while (iter.has_next()) {
duke@435 1024 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 1025 oop next = java_lang_ref_Reference::next(iter.obj());
coleenp@548 1026 assert(next->is_oop_or_null(), "bad next field");
duke@435 1027 // If referent has been cleared or Reference is not active,
duke@435 1028 // drop it.
coleenp@548 1029 if (iter.referent() == NULL || next != NULL) {
duke@435 1030 debug_only(
duke@435 1031 if (PrintGCDetails && TraceReferenceGC) {
duke@435 1032 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
duke@435 1033 INTPTR_FORMAT " with next field: " INTPTR_FORMAT
duke@435 1034 " and referent: " INTPTR_FORMAT,
coleenp@548 1035 iter.obj(), next, iter.referent());
duke@435 1036 }
duke@435 1037 )
duke@435 1038 // Remove Reference object from list
duke@435 1039 iter.remove();
ysr@887 1040 iter.move_to_next();
duke@435 1041 } else {
duke@435 1042 iter.next();
duke@435 1043 }
duke@435 1044 }
duke@435 1045 NOT_PRODUCT(
duke@435 1046 if (PrintGCDetails && TraceReferenceGC) {
duke@435 1047 gclog_or_tty->print(
duke@435 1048 " Removed %d Refs with NULL referents out of %d discovered Refs",
duke@435 1049 iter.removed(), iter.processed());
duke@435 1050 }
duke@435 1051 )
duke@435 1052 }
duke@435 1053
duke@435 1054 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
duke@435 1055 int id = 0;
duke@435 1056 // Determine the queue index to use for this object.
duke@435 1057 if (_discovery_is_mt) {
duke@435 1058 // During a multi-threaded discovery phase,
duke@435 1059 // each thread saves to its "own" list.
duke@435 1060 Thread* thr = Thread::current();
johnc@2316 1061 id = thr->as_Worker_thread()->id();
duke@435 1062 } else {
duke@435 1063 // single-threaded discovery, we save in round-robin
duke@435 1064 // fashion to each of the lists.
duke@435 1065 if (_processing_is_mt) {
duke@435 1066 id = next_id();
duke@435 1067 }
duke@435 1068 }
jmasa@2188 1069 assert(0 <= id && id < _max_num_q, "Id is out-of-bounds (call Freud?)");
duke@435 1070
duke@435 1071 // Get the discovered queue to which we will add
duke@435 1072 DiscoveredList* list = NULL;
duke@435 1073 switch (rt) {
duke@435 1074 case REF_OTHER:
duke@435 1075 // Unknown reference type, no special treatment
duke@435 1076 break;
duke@435 1077 case REF_SOFT:
duke@435 1078 list = &_discoveredSoftRefs[id];
duke@435 1079 break;
duke@435 1080 case REF_WEAK:
duke@435 1081 list = &_discoveredWeakRefs[id];
duke@435 1082 break;
duke@435 1083 case REF_FINAL:
duke@435 1084 list = &_discoveredFinalRefs[id];
duke@435 1085 break;
duke@435 1086 case REF_PHANTOM:
duke@435 1087 list = &_discoveredPhantomRefs[id];
duke@435 1088 break;
duke@435 1089 case REF_NONE:
duke@435 1090 // we should not reach here if we are an instanceRefKlass
duke@435 1091 default:
duke@435 1092 ShouldNotReachHere();
duke@435 1093 }
jmasa@2188 1094 if (TraceReferenceGC && PrintGCDetails) {
johnc@2316 1095 gclog_or_tty->print_cr("Thread %d gets list " INTPTR_FORMAT, id, list);
jmasa@2188 1096 }
duke@435 1097 return list;
duke@435 1098 }
duke@435 1099
coleenp@548 1100 inline void
coleenp@548 1101 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
coleenp@548 1102 oop obj,
coleenp@548 1103 HeapWord* discovered_addr) {
duke@435 1104 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
duke@435 1105 // First we must make sure this object is only enqueued once. CAS in a non null
duke@435 1106 // discovered_addr.
ysr@777 1107 oop current_head = refs_list.head();
ysr@777 1108
ysr@1280 1109 // Note: In the case of G1, this specific pre-barrier is strictly
ysr@777 1110 // not necessary because the only case we are interested in
ysr@1280 1111 // here is when *discovered_addr is NULL (see the CAS further below),
ysr@1280 1112 // so this will expand to nothing. As a result, we have manually
ysr@1280 1113 // elided this out for G1, but left in the test for some future
ysr@1280 1114 // collector that might have need for a pre-barrier here.
ysr@777 1115 if (_discovered_list_needs_barrier && !UseG1GC) {
ysr@1280 1116 if (UseCompressedOops) {
ysr@1280 1117 _bs->write_ref_field_pre((narrowOop*)discovered_addr, current_head);
ysr@1280 1118 } else {
ysr@1280 1119 _bs->write_ref_field_pre((oop*)discovered_addr, current_head);
ysr@1280 1120 }
ysr@1280 1121 guarantee(false, "Need to check non-G1 collector");
ysr@777 1122 }
ysr@777 1123 oop retest = oopDesc::atomic_compare_exchange_oop(current_head, discovered_addr,
coleenp@548 1124 NULL);
duke@435 1125 if (retest == NULL) {
duke@435 1126 // This thread just won the right to enqueue the object.
duke@435 1127 // We have separate lists for enqueueing so no synchronization
duke@435 1128 // is necessary.
coleenp@548 1129 refs_list.set_head(obj);
ysr@887 1130 refs_list.inc_length(1);
ysr@777 1131 if (_discovered_list_needs_barrier) {
ysr@1280 1132 _bs->write_ref_field((void*)discovered_addr, current_head);
ysr@777 1133 }
johnc@2316 1134
johnc@2316 1135 if (TraceReferenceGC) {
johnc@2316 1136 gclog_or_tty->print_cr("Enqueued reference (mt) (" INTPTR_FORMAT ": %s)",
johnc@2316 1137 obj, obj->blueprint()->internal_name());
johnc@2316 1138 }
duke@435 1139 } else {
duke@435 1140 // If retest was non NULL, another thread beat us to it:
duke@435 1141 // The reference has already been discovered...
duke@435 1142 if (TraceReferenceGC) {
duke@435 1143 gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)",
duke@435 1144 obj, obj->blueprint()->internal_name());
duke@435 1145 }
duke@435 1146 }
duke@435 1147 }
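// Race note: if two threads discover the same Reference concurrently,
// exactly one CAS on the discovered field above succeeds; the loser
// observes retest != NULL and leaves the object on the winner's list,
// so each Reference is enqueued at most once.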
duke@435 1148
duke@435 1149 // We mention two of several possible choices here:
duke@435 1150 // #0: if the reference object is not in the "originating generation"
duke@435 1151 // (or part of the heap being collected, indicated by our "span"),
duke@435 1152 // we don't treat it specially (i.e. we scan it as we would
duke@435 1153 // a normal oop, treating its references as strong references).
duke@435 1154 // This means that references can't be enqueued unless their
duke@435 1155 // referent is also in the same span. This is the simplest,
duke@435 1156 // most "local" and most conservative approach, albeit one
duke@435 1157 // that may cause weak references to be enqueued least promptly.
duke@435 1158 // We call this choice the "ReferenceBasedDiscovery" policy.
duke@435 1159 // #1: the reference object may be in any generation (span), but if
duke@435 1160 // the referent is in the generation (span) being currently collected
duke@435 1161 // then we can discover the reference object, provided
duke@435 1162 // the object has not already been discovered by
duke@435 1163 // a different concurrently running collector (as may be the
duke@435 1164 // case, for instance, if the reference object is in CMS and
duke@435 1165 // the referent in DefNewGeneration), and provided the processing
duke@435 1166 // of this reference object by the current collector will
duke@435 1167 // appear atomic to every other collector in the system.
duke@435 1168 // (Thus, for instance, a concurrent collector may not
duke@435 1169 // discover references in other generations even if the
duke@435 1170 // referent is in its own generation). This policy may,
duke@435 1171 // in certain cases, enqueue references somewhat sooner than
duke@435 1172 // might Policy #0 above, but at marginally increased cost
duke@435 1173 // and complexity in processing these references.
duke@435 1174 // We call this choice the "ReferentBasedDiscovery" policy.
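// Concrete example: a young collection whose "span" is the young generation,
// with a Reference object in the old generation whose referent is in the
// young generation. Policy #0 skips discovery (the Reference itself is
// outside the span); policy #1 can discover it for an atomic (stop-world)
// collector, because the referent is in the span being collected.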
duke@435 1175 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
duke@435 1176 // We enqueue references only if we are discovering refs
duke@435 1177 // (rather than processing discovered refs).
duke@435 1178 if (!_discovering_refs || !RegisterReferences) {
duke@435 1179 return false;
duke@435 1180 }
duke@435 1181 // We only enqueue active references.
coleenp@548 1182 oop next = java_lang_ref_Reference::next(obj);
coleenp@548 1183 if (next != NULL) {
duke@435 1184 return false;
duke@435 1185 }
duke@435 1186
duke@435 1187 HeapWord* obj_addr = (HeapWord*)obj;
duke@435 1188 if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
duke@435 1189 !_span.contains(obj_addr)) {
duke@435 1190 // Reference is not in the originating generation;
duke@435 1191 // don't treat it specially (i.e. we want to scan it as a normal
duke@435 1192 // object with strong references).
duke@435 1193 return false;
duke@435 1194 }
duke@435 1195
duke@435 1196 // We only enqueue references whose referents are not (yet) strongly
duke@435 1197 // reachable.
duke@435 1198 if (is_alive_non_header() != NULL) {
duke@435 1199 oop referent = java_lang_ref_Reference::referent(obj);
ysr@888 1200 // In the case of non-concurrent discovery, the last
ysr@888 1201 // disjunct below should hold. It may not hold in the
ysr@888 1202 // case of concurrent discovery because mutators may
ysr@888 1203 // concurrently clear() a Reference.
ysr@888 1204 assert(UseConcMarkSweepGC || UseG1GC || referent != NULL,
ysr@888 1205 "Refs with null referents already filtered");
duke@435 1206 if (is_alive_non_header()->do_object_b(referent)) {
duke@435 1207 return false; // referent is reachable
duke@435 1208 }
duke@435 1209 }
ysr@888 1210 if (rt == REF_SOFT) {
ysr@888 1211 // For soft refs we can decide now if these are not
ysr@888 1212 // current candidates for clearing, in which case we
ysr@888 1213 // can mark through them now, rather than delaying that
ysr@888 1214 // to the reference-processing phase. Since all current
ysr@888 1215 // time-stamp policies advance the soft-ref clock only
ysr@888 1216 // at a major collection cycle, this decision is currently
ysr@888 1217 // always accurate.
ysr@888 1218 if (!_current_soft_ref_policy->should_clear_reference(obj)) {
ysr@888 1219 return false;
ysr@888 1220 }
ysr@888 1221 }
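// A rough sketch, not a verbatim copy, of what the default LRU policies in
// referencePolicy.cpp compute in should_clear_reference(); free_heap_in_mb
// is an illustrative stand-in for the policy's cached heap measurement:
//
//   jlong elapsed = java_lang_ref_SoftReference::clock() -
//                   java_lang_ref_SoftReference::timestamp(obj);
//   // Clear only refs that have been idle longer than
//   // SoftRefLRUPolicyMSPerMB milliseconds per free megabyte of heap.
//   return elapsed > (jlong)SoftRefLRUPolicyMSPerMB * free_heap_in_mb;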
duke@435 1222
ysr@777 1223 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
ysr@777 1224 const oop discovered = java_lang_ref_Reference::discovered(obj);
coleenp@548 1225 assert(discovered->is_oop_or_null(), "bad discovered field");
coleenp@548 1226 if (discovered != NULL) {
duke@435 1227 // The reference has already been discovered...
duke@435 1228 if (TraceReferenceGC) {
duke@435 1229 gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)",
coleenp@548 1230 obj, obj->blueprint()->internal_name());
duke@435 1231 }
duke@435 1232 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
duke@435 1233 // Assumes that an object is not processed twice;
duke@435 1234 // if it has already been discovered it must be on another
duke@435 1235 // generation's discovered list, so we will not discover it here.
duke@435 1236 return false;
duke@435 1237 } else {
duke@435 1238 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
duke@435 1239 "Unrecognized policy");
duke@435 1240 // Check assumption that an object is not potentially
duke@435 1241 // discovered twice except by concurrent collectors that potentially
duke@435 1242 // trace the same Reference object twice.
johnc@2316 1243 assert(UseConcMarkSweepGC || UseG1GC,
johnc@2316 1244 "Only possible with a concurrent marking collector");
duke@435 1245 return true;
duke@435 1246 }
duke@435 1247 }
duke@435 1248
duke@435 1249 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
duke@435 1250 oop referent = java_lang_ref_Reference::referent(obj);
duke@435 1251 assert(referent->is_oop(), "bad referent");
duke@435 1252 // discover if and only if either:
duke@435 1253 // the reference itself is in our span, or
duke@435 1254 // we are an atomic collector and the referent is in our span
duke@435 1255 if (_span.contains(obj_addr) ||
duke@435 1256 (discovery_is_atomic() && _span.contains(referent))) {
duke@435 1257 // discoverable; fall through and add it to the list below
duke@435 1258 } else {
duke@435 1259 return false;
duke@435 1260 }
duke@435 1261 } else {
duke@435 1262 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
duke@435 1263 _span.contains(obj_addr), "code inconsistency");
duke@435 1264 }
duke@435 1265
duke@435 1266 // Get the right type of discovered queue head.
duke@435 1267 DiscoveredList* list = get_discovered_list(rt);
duke@435 1268 if (list == NULL) {
duke@435 1269 return false; // nothing special needs to be done
duke@435 1270 }
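// (A NULL list means this ReferenceType gets no special treatment --
// e.g. REF_OTHER -- and the object is scanned as an ordinary oop.)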
duke@435 1271
duke@435 1272 if (_discovery_is_mt) {
duke@435 1273 add_to_discovered_list_mt(*list, obj, discovered_addr);
duke@435 1274 } else {
ysr@777 1275 // If "_discovered_list_needs_barrier", we do write barriers when
ysr@777 1276 // updating the discovered reference list. Otherwise, we do a raw store
ysr@777 1277 // here: the field will be visited later when processing the discovered
ysr@777 1278 // references.
ysr@777 1279 oop current_head = list->head();
ysr@777 1280 // As in the case further above, since we are over-writing a NULL
ysr@777 1281 // pre-value, we can safely elide the pre-barrier here for the case of G1.
ysr@777 1282 assert(discovered == NULL, "control point invariant");
ysr@777 1283 if (_discovered_list_needs_barrier && !UseG1GC) { // safe to elide for G1
ysr@1280 1284 if (UseCompressedOops) {
ysr@1280 1285 _bs->write_ref_field_pre((narrowOop*)discovered_addr, current_head);
ysr@1280 1286 } else {
ysr@1280 1287 _bs->write_ref_field_pre((oop*)discovered_addr, current_head);
ysr@1280 1288 }
ysr@1280 1289 guarantee(false, "Need to check non-G1 collector");
ysr@777 1290 }
ysr@777 1291 oop_store_raw(discovered_addr, current_head);
ysr@777 1292 if (_discovered_list_needs_barrier) {
ysr@1280 1293 _bs->write_ref_field((void*)discovered_addr, current_head);
ysr@777 1294 }
duke@435 1295 list->set_head(obj);
ysr@887 1296 list->inc_length(1);
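// Net effect of the two stores above: obj->discovered = old head and
// list head = obj, i.e. a stack-style push onto this type's list.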
duke@435 1297
johnc@2316 1298 if (TraceReferenceGC) {
duke@435 1299 gclog_or_tty->print_cr("Enqueued reference (" INTPTR_FORMAT ": %s)",
johnc@2316 1300 obj, obj->blueprint()->internal_name());
duke@435 1301 }
duke@435 1302 }
duke@435 1303 assert(obj->is_oop(), "Discovered a bad reference");
johnc@2316 1304 assert(java_lang_ref_Reference::referent(obj)->is_oop(), "Discovered a bad referent");
duke@435 1305 return true;
duke@435 1306 }
duke@435 1307
duke@435 1308 // Preclean the discovered references by removing those
duke@435 1309 // whose referents are alive, and by marking from those that
duke@435 1310 // are not active. These lists can be handled here
duke@435 1311 // in any order and, indeed, concurrently.
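// Precleaning is done while the mutators are running (e.g. by CMS), so
// that fewer references remain to be handled during the stop-the-world
// reference-processing phase, shortening that pause.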
duke@435 1312 void ReferenceProcessor::preclean_discovered_references(
duke@435 1313 BoolObjectClosure* is_alive,
duke@435 1314 OopClosure* keep_alive,
duke@435 1315 VoidClosure* complete_gc,
jmasa@1625 1316 YieldClosure* yield,
jmasa@1625 1317 bool should_unload_classes) {
duke@435 1318
duke@435 1319 NOT_PRODUCT(verify_ok_to_handle_reflists());
duke@435 1320
jmasa@1370 1321 #ifdef ASSERT
jmasa@1370 1322 bool must_remember_klasses = (ClassUnloading && !UseConcMarkSweepGC) ||
jmasa@1625 1323 (CMSClassUnloadingEnabled && UseConcMarkSweepGC) ||
jmasa@1625 1324 (ExplicitGCInvokesConcurrentAndUnloadsClasses &&
jmasa@1625 1325 UseConcMarkSweepGC && should_unload_classes);
jmasa@1370 1326 RememberKlassesChecker mx(must_remember_klasses);
jmasa@1370 1327 #endif
duke@435 1328 // Soft references
duke@435 1329 {
duke@435 1330 TraceTime tt("Preclean SoftReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1331 false, gclog_or_tty);
jmasa@2188 1332 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1333 if (yield->should_return()) {
ysr@887 1334 return;
ysr@887 1335 }
duke@435 1336 preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
duke@435 1337 keep_alive, complete_gc, yield);
duke@435 1338 }
duke@435 1339 }
duke@435 1340
duke@435 1341 // Weak references
duke@435 1342 {
duke@435 1343 TraceTime tt("Preclean WeakReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1344 false, gclog_or_tty);
duke@435 1345 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1346 if (yield->should_return()) {
ysr@887 1347 return;
ysr@887 1348 }
duke@435 1349 preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
duke@435 1350 keep_alive, complete_gc, yield);
duke@435 1351 }
duke@435 1352 }
duke@435 1353
duke@435 1354 // Final references
duke@435 1355 {
duke@435 1356 TraceTime tt("Preclean FinalReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1357 false, gclog_or_tty);
duke@435 1358 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1359 if (yield->should_return()) {
ysr@887 1360 return;
ysr@887 1361 }
duke@435 1362 preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
duke@435 1363 keep_alive, complete_gc, yield);
duke@435 1364 }
duke@435 1365 }
duke@435 1366
duke@435 1367 // Phantom references
duke@435 1368 {
duke@435 1369 TraceTime tt("Preclean PhantomReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1370 false, gclog_or_tty);
duke@435 1371 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1372 if (yield->should_return()) {
ysr@887 1373 return;
ysr@887 1374 }
duke@435 1375 preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
duke@435 1376 keep_alive, complete_gc, yield);
duke@435 1377 }
duke@435 1378 }
duke@435 1379 }
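// The four blocks above differ only in the list array and the timer label;
// a sketch of a table-driven equivalent (the kinds table is hypothetical,
// not part of this file):
//
//   struct RefKind { DiscoveredList* lists; const char* label; };
//   const RefKind kinds[] = {
//     { _discoveredSoftRefs,    "Preclean SoftReferences"    },
//     { _discoveredWeakRefs,    "Preclean WeakReferences"    },
//     { _discoveredFinalRefs,   "Preclean FinalReferences"   },
//     { _discoveredPhantomRefs, "Preclean PhantomReferences" },
//   };
//   for (int k = 0; k < 4; k++) {
//     TraceTime tt(kinds[k].label, PrintGCDetails && PrintReferenceGC,
//                  false, gclog_or_tty);
//     for (int i = 0; i < _max_num_q; i++) {
//       if (yield->should_return()) return;
//       preclean_discovered_reflist(kinds[k].lists[i], is_alive,
//                                   keep_alive, complete_gc, yield);
//     }
//   }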
duke@435 1380
duke@435 1381 // Walk the given discovered ref list, and remove all reference objects
duke@435 1382 // whose referents are still alive, whose referents are NULL, or which
ysr@887 1383 // are not active (have a non-NULL next field). NOTE: When we are
ysr@887 1384 // thus precleaning the ref lists (which happens single-threaded today),
ysr@887 1385 // we do not disable refs discovery to honour the correct semantics of
ysr@887 1386 // java.lang.Reference. As a result, we need to be careful below
ysr@887 1387 // that ref removal steps interleave safely with ref discovery steps
ysr@887 1388 // (in this thread).
coleenp@548 1389 void
coleenp@548 1390 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
coleenp@548 1391 BoolObjectClosure* is_alive,
coleenp@548 1392 OopClosure* keep_alive,
coleenp@548 1393 VoidClosure* complete_gc,
coleenp@548 1394 YieldClosure* yield) {
duke@435 1395 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 1396 while (iter.has_next()) {
duke@435 1397 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 1398 oop obj = iter.obj();
coleenp@548 1399 oop next = java_lang_ref_Reference::next(obj);
duke@435 1400 if (iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@548 1401 next != NULL) {
duke@435 1402 // The referent has been cleared, or is alive, or the Reference is not
duke@435 1403 // active; we need to trace and mark its cohort.
duke@435 1404 if (TraceReferenceGC) {
duke@435 1405 gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
duke@435 1406 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 1407 }
duke@435 1408 // Remove Reference object from list
duke@435 1409 iter.remove();
duke@435 1410 // Keep alive its cohort.
duke@435 1411 iter.make_referent_alive();
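// The next field is itself a heap reference, so it must be visited
// through a slot of the correct width: compressed oops change the
// in-memory layout of the Reference object's fields.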
coleenp@548 1412 if (UseCompressedOops) {
coleenp@548 1413 narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 1414 keep_alive->do_oop(next_addr);
coleenp@548 1415 } else {
coleenp@548 1416 oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 1417 keep_alive->do_oop(next_addr);
coleenp@548 1418 }
ysr@887 1419 iter.move_to_next();
duke@435 1420 } else {
duke@435 1421 iter.next();
duke@435 1422 }
duke@435 1423 }
duke@435 1424 // Close the reachable set
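// (complete_gc transitively marks everything reachable from the oops
// passed to keep_alive above, e.g. by draining the collector's
// marking stack.)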
duke@435 1425 complete_gc->do_void();
duke@435 1426
duke@435 1427 NOT_PRODUCT(
duke@435 1428 if (PrintGCDetails && PrintReferenceGC) {
jmasa@2188 1429 gclog_or_tty->print_cr(" Dropped %d Refs out of %d "
jmasa@2188 1430 "Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 1431 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 1432 }
duke@435 1433 )
duke@435 1434 }
duke@435 1435
duke@435 1436 const char* ReferenceProcessor::list_name(int i) {
jmasa@2188 1437 assert(i >= 0 && i < _max_num_q * subclasses_of_ref, "Out of bounds index");
jmasa@2188 1438 int j = i / _max_num_q;
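// The discovered lists form subclasses_of_ref contiguous blocks of
// _max_num_q queues each (Soft, Weak, Final, Phantom, in that order),
// so dividing by _max_num_q recovers the reference type.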
duke@435 1439 switch (j) {
duke@435 1440 case 0: return "SoftRef";
duke@435 1441 case 1: return "WeakRef";
duke@435 1442 case 2: return "FinalRef";
duke@435 1443 case 3: return "PhantomRef";
duke@435 1444 }
duke@435 1445 ShouldNotReachHere();
duke@435 1446 return NULL;
duke@435 1447 }
duke@435 1448
duke@435 1449 #ifndef PRODUCT
duke@435 1450 void ReferenceProcessor::verify_ok_to_handle_reflists() {
duke@435 1451 // empty for now
duke@435 1452 }
duke@435 1453 #endif
duke@435 1454
duke@435 1455 void ReferenceProcessor::verify() {
coleenp@548 1456 guarantee(sentinel_ref() != NULL && sentinel_ref()->is_oop(), "Lost _sentinelRef");
duke@435 1457 }
duke@435 1458
duke@435 1459 #ifndef PRODUCT
duke@435 1460 void ReferenceProcessor::clear_discovered_references() {
duke@435 1461 guarantee(!_discovering_refs, "Discovering refs?");
jmasa@2188 1462 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
duke@435 1463 oop obj = _discoveredSoftRefs[i].head();
coleenp@548 1464 while (obj != sentinel_ref()) {
duke@435 1465 oop next = java_lang_ref_Reference::discovered(obj);
duke@435 1466 java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
duke@435 1467 obj = next;
duke@435 1468 }
coleenp@548 1469 _discoveredSoftRefs[i].set_head(sentinel_ref());
duke@435 1470 _discoveredSoftRefs[i].set_length(0);
duke@435 1471 }
duke@435 1472 }
duke@435 1473 #endif // PRODUCT
