src/share/vm/memory/referenceProcessor.cpp

author:      ysr
date:        Thu, 20 Nov 2008 12:27:41 -0800
changeset:   887:00b023ae2d78
parent:      791:1ee8caae33af
child:       888:c96030fff130
permissions: -rw-r--r--

6722113: CMS: Incorrect overflow handling during precleaning of Reference lists
Summary: When we encounter marking stack overflow during precleaning of Reference lists, we were using the overflow list mechanism, which can cause problems because it mutates the mark word of the object header and can therefore conflict with concurrent mutator accesses and updates of that field. Instead we should use the usual overflow-handling mechanism for concurrent phases, namely dirtying the card on which the overflowed object lies. Since precleaning effectively does a form of discovered-list processing, albeit with discovery still enabled, we needed to adjust some code to be correct in the face of interleaved processing and discovery.
Reviewed-by: apetrusenko, jcoomes
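For context, a minimal sketch contrasting the two overflow-handling styles named in
the summary. It is illustrative only: handle_marking_stack_overflow is an invented
name, and CardTableModRefBS::invalidate stands in for whatever card-dirtying
primitive the collector actually uses; the real CMS code paths touched by this
changeset are not shown here.

  // Hypothetical sketch: remembering an object for later tracing when the
  // marking stack fills up during a concurrent phase such as precleaning.
  void handle_marking_stack_overflow(oop obj, CardTableModRefBS* ct) {
    // (a) The STW-style overflow list links obj through its mark word onto a
    //     global list. That mutates the object header, which a mutator may be
    //     reading or updating (for locking, hashing, etc.) at the same time;
    //     this is the conflict the fix removes.
    //
    // (b) The concurrent-phase style dirties the card spanning obj's start so
    //     that a later card scan revisits the object. Only the card table is
    //     written, never the object header.
    ct->invalidate(MemRegion((HeapWord*)obj, (size_t)1));
  }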

duke@435 1 /*
xdono@631 2 * Copyright 2001-2008 Sun Microsystems, Inc. All Rights Reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
duke@435 19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
duke@435 20 * CA 95054 USA or visit www.sun.com if you need additional information or
duke@435 21 * have any questions.
duke@435 22 *
duke@435 23 */
duke@435 24
duke@435 25 # include "incls/_precompiled.incl"
duke@435 26 # include "incls/_referenceProcessor.cpp.incl"
duke@435 27
duke@435 28 // List of discovered references.
duke@435 29 class DiscoveredList {
duke@435 30 public:
coleenp@548 31 DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
coleenp@548 32 oop head() const {
coleenp@548 33 return UseCompressedOops ? oopDesc::decode_heap_oop_not_null(_compressed_head) :
coleenp@548 34 _oop_head;
coleenp@548 35 }
coleenp@548 36 HeapWord* adr_head() {
coleenp@548 37 return UseCompressedOops ? (HeapWord*)&_compressed_head :
coleenp@548 38 (HeapWord*)&_oop_head;
coleenp@548 39 }
coleenp@548 40 void set_head(oop o) {
coleenp@548 41 if (UseCompressedOops) {
coleenp@548 42 // Must compress the head ptr.
coleenp@548 43 _compressed_head = oopDesc::encode_heap_oop_not_null(o);
coleenp@548 44 } else {
coleenp@548 45 _oop_head = o;
coleenp@548 46 }
coleenp@548 47 }
coleenp@548 48 bool empty() const { return head() == ReferenceProcessor::sentinel_ref(); }
duke@435 49 size_t length() { return _len; }
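// Note: inc_length/dec_length keep the cached length accurate when reference
// discovery (which appends elements) interleaves with preclean-time removal
// (which trims them); see the 6722113 summary above.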
ysr@887 50 void set_length(size_t len) { _len = len; }
ysr@887 51 void inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
ysr@887 52 void dec_length(size_t dec) { _len -= dec; }
duke@435 53 private:
coleenp@548 54 // Set value depending on UseCompressedOops. This could be a template class
coleenp@548 55 // but then we have to fix all the instantiations and declarations that use this class.
coleenp@548 56 oop _oop_head;
coleenp@548 57 narrowOop _compressed_head;
duke@435 58 size_t _len;
duke@435 59 };
duke@435 60
duke@435 61 oop ReferenceProcessor::_sentinelRef = NULL;
duke@435 62
duke@435 63 const int subclasses_of_ref = REF_PHANTOM - REF_OTHER;
duke@435 64
duke@435 65 void referenceProcessor_init() {
duke@435 66 ReferenceProcessor::init_statics();
duke@435 67 }
duke@435 68
duke@435 69 void ReferenceProcessor::init_statics() {
coleenp@548 70 assert(_sentinelRef == NULL, "should be initialized precisely once");
duke@435 71 EXCEPTION_MARK;
duke@435 72 _sentinelRef = instanceKlass::cast(
coleenp@548 73 SystemDictionary::reference_klass())->
coleenp@548 74 allocate_permanent_instance(THREAD);
duke@435 75
duke@435 76 // Initialize the master soft ref clock.
duke@435 77 java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());
duke@435 78
duke@435 79 if (HAS_PENDING_EXCEPTION) {
duke@435 80 Handle ex(THREAD, PENDING_EXCEPTION);
duke@435 81 vm_exit_during_initialization(ex);
duke@435 82 }
duke@435 83 assert(_sentinelRef != NULL && _sentinelRef->is_oop(),
duke@435 84 "Just constructed it!");
duke@435 85 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
duke@435 86 RefDiscoveryPolicy == ReferentBasedDiscovery,
duke@435 87 "Unrecongnized RefDiscoveryPolicy");
duke@435 88 }
duke@435 89
coleenp@548 90 ReferenceProcessor*
coleenp@548 91 ReferenceProcessor::create_ref_processor(MemRegion span,
coleenp@548 92 bool atomic_discovery,
coleenp@548 93 bool mt_discovery,
coleenp@548 94 BoolObjectClosure* is_alive_non_header,
coleenp@548 95 int parallel_gc_threads,
ysr@777 96 bool mt_processing,
ysr@777 97 bool dl_needs_barrier) {
duke@435 98 int mt_degree = 1;
duke@435 99 if (parallel_gc_threads > 1) {
duke@435 100 mt_degree = parallel_gc_threads;
duke@435 101 }
duke@435 102 ReferenceProcessor* rp =
duke@435 103 new ReferenceProcessor(span, atomic_discovery,
duke@435 104 mt_discovery, mt_degree,
ysr@777 105 mt_processing && (parallel_gc_threads > 0),
ysr@777 106 dl_needs_barrier);
duke@435 107 if (rp == NULL) {
duke@435 108 vm_exit_during_initialization("Could not allocate ReferenceProcessor object");
duke@435 109 }
duke@435 110 rp->set_is_alive_non_header(is_alive_non_header);
duke@435 111 return rp;
duke@435 112 }
duke@435 113
duke@435 114 ReferenceProcessor::ReferenceProcessor(MemRegion span,
coleenp@548 115 bool atomic_discovery,
coleenp@548 116 bool mt_discovery,
coleenp@548 117 int mt_degree,
ysr@777 118 bool mt_processing,
ysr@777 119 bool discovered_list_needs_barrier) :
duke@435 120 _discovering_refs(false),
duke@435 121 _enqueuing_is_done(false),
duke@435 122 _is_alive_non_header(NULL),
ysr@777 123 _discovered_list_needs_barrier(discovered_list_needs_barrier),
ysr@777 124 _bs(NULL),
duke@435 125 _processing_is_mt(mt_processing),
duke@435 126 _next_id(0)
duke@435 127 {
duke@435 128 _span = span;
duke@435 129 _discovery_is_atomic = atomic_discovery;
duke@435 130 _discovery_is_mt = mt_discovery;
duke@435 131 _num_q = mt_degree;
duke@435 132 _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList, _num_q * subclasses_of_ref);
duke@435 133 if (_discoveredSoftRefs == NULL) {
duke@435 134 vm_exit_during_initialization("Could not allocated RefProc Array");
duke@435 135 }
duke@435 136 _discoveredWeakRefs = &_discoveredSoftRefs[_num_q];
duke@435 137 _discoveredFinalRefs = &_discoveredWeakRefs[_num_q];
duke@435 138 _discoveredPhantomRefs = &_discoveredFinalRefs[_num_q];
coleenp@548 139 assert(sentinel_ref() != NULL, "_sentinelRef is NULL");
duke@435 140 // Initialize all entries to _sentinelRef
duke@435 141 for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
coleenp@548 142 _discoveredSoftRefs[i].set_head(sentinel_ref());
duke@435 143 _discoveredSoftRefs[i].set_length(0);
duke@435 144 }
ysr@777 145 // If we do barriers, cache a copy of the barrier set.
ysr@777 146 if (discovered_list_needs_barrier) {
ysr@777 147 _bs = Universe::heap()->barrier_set();
ysr@777 148 }
duke@435 149 }
duke@435 150
duke@435 151 #ifndef PRODUCT
duke@435 152 void ReferenceProcessor::verify_no_references_recorded() {
duke@435 153 guarantee(!_discovering_refs, "Discovering refs?");
duke@435 154 for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
duke@435 155 guarantee(_discoveredSoftRefs[i].empty(),
duke@435 156 "Found non-empty discovered list");
duke@435 157 }
duke@435 158 }
duke@435 159 #endif
duke@435 160
duke@435 161 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
duke@435 162 for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
coleenp@548 163 if (UseCompressedOops) {
coleenp@548 164 f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
coleenp@548 165 } else {
coleenp@548 166 f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
coleenp@548 167 }
duke@435 168 }
duke@435 169 }
duke@435 170
duke@435 171 void ReferenceProcessor::oops_do(OopClosure* f) {
coleenp@548 172 f->do_oop(adr_sentinel_ref());
duke@435 173 }
duke@435 174
coleenp@548 175 void ReferenceProcessor::update_soft_ref_master_clock() {
duke@435 176 // Update (advance) the soft ref master clock field. This must be done
duke@435 177 // after processing the soft ref list.
duke@435 178 jlong now = os::javaTimeMillis();
duke@435 179 jlong clock = java_lang_ref_SoftReference::clock();
duke@435 180 NOT_PRODUCT(
duke@435 181 if (now < clock) {
duke@435 182 warning("time warp: %d to %d", clock, now);
duke@435 183 }
duke@435 184 )
duke@435 185 // In product mode, protect ourselves from system time being adjusted
duke@435 186 // externally and going backward; see note in the implementation of
duke@435 187 // GenCollectedHeap::time_since_last_gc() for the right way to fix
duke@435 188 // this uniformly throughout the VM; see bug-id 4741166. XXX
duke@435 189 if (now > clock) {
duke@435 190 java_lang_ref_SoftReference::set_clock(now);
duke@435 191 }
duke@435 192 // Else leave clock stalled at its old value until time progresses
duke@435 193 // past clock value.
duke@435 194 }
duke@435 195
coleenp@548 196 void ReferenceProcessor::process_discovered_references(
duke@435 197 ReferencePolicy* policy,
duke@435 198 BoolObjectClosure* is_alive,
duke@435 199 OopClosure* keep_alive,
duke@435 200 VoidClosure* complete_gc,
duke@435 201 AbstractRefProcTaskExecutor* task_executor) {
duke@435 202 NOT_PRODUCT(verify_ok_to_handle_reflists());
duke@435 203
duke@435 204 assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
duke@435 205 // Stop treating discovered references specially.
duke@435 206 disable_discovery();
duke@435 207
duke@435 208 bool trace_time = PrintGCDetails && PrintReferenceGC;
duke@435 209 // Soft references
duke@435 210 {
duke@435 211 TraceTime tt("SoftReference", trace_time, false, gclog_or_tty);
duke@435 212 process_discovered_reflist(_discoveredSoftRefs, policy, true,
duke@435 213 is_alive, keep_alive, complete_gc, task_executor);
duke@435 214 }
duke@435 215
duke@435 216 update_soft_ref_master_clock();
duke@435 217
duke@435 218 // Weak references
duke@435 219 {
duke@435 220 TraceTime tt("WeakReference", trace_time, false, gclog_or_tty);
duke@435 221 process_discovered_reflist(_discoveredWeakRefs, NULL, true,
duke@435 222 is_alive, keep_alive, complete_gc, task_executor);
duke@435 223 }
duke@435 224
duke@435 225 // Final references
duke@435 226 {
duke@435 227 TraceTime tt("FinalReference", trace_time, false, gclog_or_tty);
duke@435 228 process_discovered_reflist(_discoveredFinalRefs, NULL, false,
duke@435 229 is_alive, keep_alive, complete_gc, task_executor);
duke@435 230 }
duke@435 231
duke@435 232 // Phantom references
duke@435 233 {
duke@435 234 TraceTime tt("PhantomReference", trace_time, false, gclog_or_tty);
duke@435 235 process_discovered_reflist(_discoveredPhantomRefs, NULL, false,
duke@435 236 is_alive, keep_alive, complete_gc, task_executor);
duke@435 237 }
duke@435 238
duke@435 239 // Weak global JNI references. It would make more sense (semantically) to
duke@435 240 // traverse these simultaneously with the regular weak references above, but
duke@435 241 // that is not what the JDK 1.2 specification calls for; see #4126360. Native code can
duke@435 242 // thus use JNI weak references to circumvent the phantom references and
duke@435 243 // resurrect a "post-mortem" object.
duke@435 244 {
duke@435 245 TraceTime tt("JNI Weak Reference", trace_time, false, gclog_or_tty);
duke@435 246 if (task_executor != NULL) {
duke@435 247 task_executor->set_single_threaded_mode();
duke@435 248 }
duke@435 249 process_phaseJNI(is_alive, keep_alive, complete_gc);
duke@435 250 }
duke@435 251 }
duke@435 252
duke@435 253 #ifndef PRODUCT
duke@435 254 // Calculate the number of jni handles.
coleenp@548 255 uint ReferenceProcessor::count_jni_refs() {
duke@435 256 class AlwaysAliveClosure: public BoolObjectClosure {
duke@435 257 public:
coleenp@548 258 virtual bool do_object_b(oop obj) { return true; }
coleenp@548 259 virtual void do_object(oop obj) { assert(false, "Don't call"); }
duke@435 260 };
duke@435 261
duke@435 262 class CountHandleClosure: public OopClosure {
duke@435 263 private:
duke@435 264 int _count;
duke@435 265 public:
duke@435 266 CountHandleClosure(): _count(0) {}
coleenp@548 267 void do_oop(oop* unused) { _count++; }
coleenp@548 268 void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
duke@435 269 int count() { return _count; }
duke@435 270 };
duke@435 271 CountHandleClosure global_handle_count;
duke@435 272 AlwaysAliveClosure always_alive;
duke@435 273 JNIHandles::weak_oops_do(&always_alive, &global_handle_count);
duke@435 274 return global_handle_count.count();
duke@435 275 }
duke@435 276 #endif
duke@435 277
duke@435 278 void ReferenceProcessor::process_phaseJNI(BoolObjectClosure* is_alive,
duke@435 279 OopClosure* keep_alive,
duke@435 280 VoidClosure* complete_gc) {
duke@435 281 #ifndef PRODUCT
duke@435 282 if (PrintGCDetails && PrintReferenceGC) {
duke@435 283 unsigned int count = count_jni_refs();
duke@435 284 gclog_or_tty->print(", %u refs", count);
duke@435 285 }
duke@435 286 #endif
duke@435 287 JNIHandles::weak_oops_do(is_alive, keep_alive);
duke@435 288 // Finally remember to keep sentinel around
coleenp@548 289 keep_alive->do_oop(adr_sentinel_ref());
duke@435 290 complete_gc->do_void();
duke@435 291 }
duke@435 292
coleenp@548 293
coleenp@548 294 template <class T>
coleenp@548 295 static bool enqueue_discovered_ref_helper(ReferenceProcessor* ref,
coleenp@548 296 AbstractRefProcTaskExecutor* task_executor) {
coleenp@548 297
duke@435 298 // Remember old value of pending references list
coleenp@548 299 T* pending_list_addr = (T*)java_lang_ref_Reference::pending_list_addr();
coleenp@548 300 T old_pending_list_value = *pending_list_addr;
duke@435 301
duke@435 302 // Enqueue references that are not made active again, and
duke@435 303 // clear the decks for the next collection (cycle).
coleenp@548 304 ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor);
duke@435 305 // Do the oop-check on pending_list_addr missed in
duke@435 306 // enqueue_discovered_reflist. We should probably
duke@435 307 // do a raw oop_check so that future such idempotent
duke@435 308 // oop_stores relying on the oop-check side-effect
duke@435 309 // may be elided automatically and safely without
duke@435 310 // affecting correctness.
coleenp@548 311 oop_store(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
duke@435 312
duke@435 313 // Stop treating discovered references specially.
coleenp@548 314 ref->disable_discovery();
duke@435 315
duke@435 316 // Return true if new pending references were added
duke@435 317 return old_pending_list_value != *pending_list_addr;
duke@435 318 }
duke@435 319
coleenp@548 320 bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
coleenp@548 321 NOT_PRODUCT(verify_ok_to_handle_reflists());
coleenp@548 322 if (UseCompressedOops) {
coleenp@548 323 return enqueue_discovered_ref_helper<narrowOop>(this, task_executor);
coleenp@548 324 } else {
coleenp@548 325 return enqueue_discovered_ref_helper<oop>(this, task_executor);
coleenp@548 326 }
coleenp@548 327 }
coleenp@548 328
duke@435 329 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
coleenp@548 330 HeapWord* pending_list_addr) {
duke@435 331 // Given a list of refs linked through the "discovered" field
duke@435 332 // (java.lang.ref.Reference.discovered) chain them through the
duke@435 333 // "next" field (java.lang.ref.Reference.next) and prepend
duke@435 334 // to the pending list.
duke@435 335 if (TraceReferenceGC && PrintGCDetails) {
duke@435 336 gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
duke@435 337 INTPTR_FORMAT, (address)refs_list.head());
duke@435 338 }
duke@435 339 oop obj = refs_list.head();
duke@435 340 // Walk down the list, copying the discovered field into
duke@435 341 // the next field and clearing it (except for the last
duke@435 342 // non-sentinel object which is treated specially to avoid
duke@435 343 // confusion with an active reference).
coleenp@548 344 while (obj != sentinel_ref()) {
duke@435 345 assert(obj->is_instanceRef(), "should be reference object");
duke@435 346 oop next = java_lang_ref_Reference::discovered(obj);
duke@435 347 if (TraceReferenceGC && PrintGCDetails) {
coleenp@548 348 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next " INTPTR_FORMAT,
coleenp@548 349 obj, next);
duke@435 350 }
coleenp@548 351 assert(java_lang_ref_Reference::next(obj) == NULL,
coleenp@548 352 "The reference should not be enqueued");
coleenp@548 353 if (next == sentinel_ref()) { // obj is last
duke@435 354 // Swap refs_list into pending_list_addr and
duke@435 355 // set obj's next to what we read from pending_list_addr.
coleenp@548 356 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
duke@435 357 // Need oop_check on pending_list_addr above;
duke@435 358 // see special oop-check code at the end of
duke@435 359 // enqueue_discovered_reflists() further below.
duke@435 360 if (old == NULL) {
duke@435 361 // obj should be made to point to itself, since
duke@435 362 // pending list was empty.
duke@435 363 java_lang_ref_Reference::set_next(obj, obj);
duke@435 364 } else {
duke@435 365 java_lang_ref_Reference::set_next(obj, old);
duke@435 366 }
duke@435 367 } else {
duke@435 368 java_lang_ref_Reference::set_next(obj, next);
duke@435 369 }
duke@435 370 java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
duke@435 371 obj = next;
duke@435 372 }
duke@435 373 }
duke@435 374
duke@435 375 // Parallel enqueue task
duke@435 376 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
duke@435 377 public:
duke@435 378 RefProcEnqueueTask(ReferenceProcessor& ref_processor,
duke@435 379 DiscoveredList discovered_refs[],
coleenp@548 380 HeapWord* pending_list_addr,
duke@435 381 oop sentinel_ref,
duke@435 382 int n_queues)
duke@435 383 : EnqueueTask(ref_processor, discovered_refs,
duke@435 384 pending_list_addr, sentinel_ref, n_queues)
duke@435 385 { }
duke@435 386
coleenp@548 387 virtual void work(unsigned int work_id) {
duke@435 388 assert(work_id < (unsigned int)_ref_processor.num_q(), "Index out-of-bounds");
duke@435 389 // Simplest first cut: static partitioning.
duke@435 390 int index = work_id;
duke@435 391 for (int j = 0; j < subclasses_of_ref; j++, index += _n_queues) {
duke@435 392 _ref_processor.enqueue_discovered_reflist(
duke@435 393 _refs_lists[index], _pending_list_addr);
duke@435 394 _refs_lists[index].set_head(_sentinel_ref);
duke@435 395 _refs_lists[index].set_length(0);
duke@435 396 }
duke@435 397 }
duke@435 398 };
duke@435 399
duke@435 400 // Enqueue references that are not made active again
coleenp@548 401 void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
duke@435 402 AbstractRefProcTaskExecutor* task_executor) {
duke@435 403 if (_processing_is_mt && task_executor != NULL) {
duke@435 404 // Parallel code
duke@435 405 RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
coleenp@548 406 pending_list_addr, sentinel_ref(), _num_q);
duke@435 407 task_executor->execute(tsk);
duke@435 408 } else {
duke@435 409 // Serial code: call the parent class's implementation
duke@435 410 for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
duke@435 411 enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
coleenp@548 412 _discoveredSoftRefs[i].set_head(sentinel_ref());
duke@435 413 _discoveredSoftRefs[i].set_length(0);
duke@435 414 }
duke@435 415 }
duke@435 416 }
duke@435 417
duke@435 418 // Iterator for the list of discovered references.
duke@435 419 class DiscoveredListIterator {
duke@435 420 public:
duke@435 421 inline DiscoveredListIterator(DiscoveredList& refs_list,
duke@435 422 OopClosure* keep_alive,
duke@435 423 BoolObjectClosure* is_alive);
duke@435 424
duke@435 425 // End Of List.
coleenp@548 426 inline bool has_next() const { return _next != ReferenceProcessor::sentinel_ref(); }
duke@435 427
duke@435 428 // Get oop to the Reference object.
coleenp@548 429 inline oop obj() const { return _ref; }
duke@435 430
duke@435 431 // Get oop to the referent object.
coleenp@548 432 inline oop referent() const { return _referent; }
duke@435 433
duke@435 434 // Returns true if referent is alive.
duke@435 435 inline bool is_referent_alive() const;
duke@435 436
duke@435 437 // Loads data for the current reference.
duke@435 438 // The "allow_null_referent" argument tells us to allow for the possibility
duke@435 439 // of a NULL referent in the discovered Reference object. This typically
duke@435 440 // happens in the case of concurrent collectors that may have done the
ysr@887 441 // discovery concurrently, or interleaved, with mutator execution.
duke@435 442 inline void load_ptrs(DEBUG_ONLY(bool allow_null_referent));
duke@435 443
duke@435 444 // Move to the next discovered reference.
duke@435 445 inline void next();
duke@435 446
ysr@887 447 // Remove the current reference from the list
duke@435 448 inline void remove();
duke@435 449
duke@435 450 // Make the Reference object active again.
duke@435 451 inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }
duke@435 452
duke@435 453 // Make the referent alive.
coleenp@548 454 inline void make_referent_alive() {
coleenp@548 455 if (UseCompressedOops) {
coleenp@548 456 _keep_alive->do_oop((narrowOop*)_referent_addr);
coleenp@548 457 } else {
coleenp@548 458 _keep_alive->do_oop((oop*)_referent_addr);
coleenp@548 459 }
coleenp@548 460 }
duke@435 461
duke@435 462 // Update the discovered field.
coleenp@548 463 inline void update_discovered() {
coleenp@548 464 // First _prev_next ref actually points into DiscoveredList (gross).
coleenp@548 465 if (UseCompressedOops) {
coleenp@548 466 _keep_alive->do_oop((narrowOop*)_prev_next);
coleenp@548 467 } else {
coleenp@548 468 _keep_alive->do_oop((oop*)_prev_next);
coleenp@548 469 }
coleenp@548 470 }
duke@435 471
duke@435 472 // NULL out referent pointer.
coleenp@548 473 inline void clear_referent() { oop_store_raw(_referent_addr, NULL); }
duke@435 474
duke@435 475 // Statistics
duke@435 476 NOT_PRODUCT(
duke@435 477 inline size_t processed() const { return _processed; }
duke@435 478 inline size_t removed() const { return _removed; }
duke@435 479 )
duke@435 480
duke@435 481 inline void move_to_next();
duke@435 482
duke@435 483 private:
duke@435 484 DiscoveredList& _refs_list;
coleenp@548 485 HeapWord* _prev_next;
duke@435 486 oop _ref;
coleenp@548 487 HeapWord* _discovered_addr;
duke@435 488 oop _next;
coleenp@548 489 HeapWord* _referent_addr;
duke@435 490 oop _referent;
duke@435 491 OopClosure* _keep_alive;
duke@435 492 BoolObjectClosure* _is_alive;
duke@435 493 DEBUG_ONLY(
duke@435 494 oop _first_seen; // cyclic linked list check
duke@435 495 )
duke@435 496 NOT_PRODUCT(
duke@435 497 size_t _processed;
duke@435 498 size_t _removed;
duke@435 499 )
duke@435 500 };
duke@435 501
duke@435 502 inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList& refs_list,
duke@435 503 OopClosure* keep_alive,
duke@435 504 BoolObjectClosure* is_alive)
duke@435 505 : _refs_list(refs_list),
coleenp@548 506 _prev_next(refs_list.adr_head()),
duke@435 507 _ref(refs_list.head()),
duke@435 508 #ifdef ASSERT
duke@435 509 _first_seen(refs_list.head()),
duke@435 510 #endif
duke@435 511 #ifndef PRODUCT
duke@435 512 _processed(0),
duke@435 513 _removed(0),
duke@435 514 #endif
duke@435 515 _next(refs_list.head()),
duke@435 516 _keep_alive(keep_alive),
duke@435 517 _is_alive(is_alive)
duke@435 518 { }
duke@435 519
coleenp@548 520 inline bool DiscoveredListIterator::is_referent_alive() const {
duke@435 521 return _is_alive->do_object_b(_referent);
duke@435 522 }
duke@435 523
coleenp@548 524 inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
duke@435 525 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
coleenp@548 526 oop discovered = java_lang_ref_Reference::discovered(_ref);
coleenp@548 527 assert(_discovered_addr && discovered->is_oop_or_null(),
duke@435 528 "discovered field is bad");
coleenp@548 529 _next = discovered;
duke@435 530 _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
coleenp@548 531 _referent = java_lang_ref_Reference::referent(_ref);
duke@435 532 assert(Universe::heap()->is_in_reserved_or_null(_referent),
duke@435 533 "Wrong oop found in java.lang.Reference object");
duke@435 534 assert(allow_null_referent ?
duke@435 535 _referent->is_oop_or_null()
duke@435 536 : _referent->is_oop(),
duke@435 537 "bad referent");
duke@435 538 }
duke@435 539
coleenp@548 540 inline void DiscoveredListIterator::next() {
duke@435 541 _prev_next = _discovered_addr;
duke@435 542 move_to_next();
duke@435 543 }
duke@435 544
coleenp@548 545 inline void DiscoveredListIterator::remove() {
duke@435 546 assert(_ref->is_oop(), "Dropping a bad reference");
coleenp@548 547 oop_store_raw(_discovered_addr, NULL);
coleenp@548 548 // First _prev_next ref actually points into DiscoveredList (gross).
coleenp@548 549 if (UseCompressedOops) {
coleenp@548 550 // Remove Reference object from list.
coleenp@548 551 oopDesc::encode_store_heap_oop_not_null((narrowOop*)_prev_next, _next);
coleenp@548 552 } else {
coleenp@548 553 // Remove Reference object from list.
coleenp@548 554 oopDesc::store_heap_oop((oop*)_prev_next, _next);
coleenp@548 555 }
duke@435 556 NOT_PRODUCT(_removed++);
ysr@887 557 _refs_list.dec_length(1);
duke@435 558 }
duke@435 559
coleenp@548 560 inline void DiscoveredListIterator::move_to_next() {
duke@435 561 _ref = _next;
duke@435 562 assert(_ref != _first_seen, "cyclic ref_list found");
duke@435 563 NOT_PRODUCT(_processed++);
duke@435 564 }
duke@435 565
duke@435 566 // NOTE: process_phase*() are largely similar, and at a high level
duke@435 567 // merely iterate over the extant list applying a predicate to
duke@435 568 // each of its elements and possibly removing that element from the
duke@435 569 // list and applying some further closures to that element.
duke@435 570 // We should consider the possibility of replacing these
duke@435 571 // process_phase*() methods by abstracting them into
duke@435 572 // a single general iterator invocation that receives appropriate
duke@435 573 // closures that accomplish this work.
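// As a hedged sketch only: such a generalization might look roughly like the
// following (iterate_and_filter, KeepPredicate and OnDrop are invented names
// for illustration; nothing of this form exists in the codebase):
//
//   template <typename KeepPredicate, typename OnDrop>
//   void iterate_and_filter(DiscoveredList&    refs_list,
//                           BoolObjectClosure* is_alive,
//                           OopClosure*        keep_alive,
//                           KeepPredicate      keep,
//                           OnDrop             on_drop) {
//     DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
//     while (iter.has_next()) {
//       iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
//       if (keep(iter)) {
//         iter.next();
//       } else {
//         iter.remove();       // unlink from the discovered list
//         on_drop(iter);       // phase-specific treatment of the dropped ref
//         iter.move_to_next();
//       }
//     }
//   }
//
// Each process_phase*() would then shrink to a pair of small function objects
// plus its closing complete_gc->do_void() call.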
duke@435 574
duke@435 575 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
duke@435 576 // referents are not alive, but that should be kept alive for policy reasons.
duke@435 577 // Keep alive the transitive closure of all such referents.
duke@435 578 void
coleenp@548 579 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
duke@435 580 ReferencePolicy* policy,
duke@435 581 BoolObjectClosure* is_alive,
duke@435 582 OopClosure* keep_alive,
duke@435 583 VoidClosure* complete_gc) {
duke@435 584 assert(policy != NULL, "Must have a non-NULL policy");
coleenp@548 585 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 586 // Decide which softly reachable refs should be kept alive.
duke@435 587 while (iter.has_next()) {
duke@435 588 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
duke@435 589 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
duke@435 590 if (referent_is_dead && !policy->should_clear_reference(iter.obj())) {
duke@435 591 if (TraceReferenceGC) {
duke@435 592 gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
coleenp@548 593 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 594 }
ysr@887 595 // Remove Reference object from list
ysr@887 596 iter.remove();
duke@435 597 // Make the Reference object active again
duke@435 598 iter.make_active();
duke@435 599 // keep the referent around
duke@435 600 iter.make_referent_alive();
ysr@887 601 iter.move_to_next();
duke@435 602 } else {
duke@435 603 iter.next();
duke@435 604 }
duke@435 605 }
duke@435 606 // Close the reachable set
duke@435 607 complete_gc->do_void();
duke@435 608 NOT_PRODUCT(
duke@435 609 if (PrintGCDetails && TraceReferenceGC) {
duke@435 610 gclog_or_tty->print(" Dropped %d dead Refs out of %d "
duke@435 611 "discovered Refs by policy ", iter.removed(), iter.processed());
duke@435 612 }
duke@435 613 )
duke@435 614 }
duke@435 615
duke@435 616 // Traverse the list and remove any Refs that are not active, or
duke@435 617 // whose referents are either alive or NULL.
duke@435 618 void
coleenp@548 619 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
duke@435 620 BoolObjectClosure* is_alive,
coleenp@548 621 OopClosure* keep_alive) {
duke@435 622 assert(discovery_is_atomic(), "Error");
coleenp@548 623 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 624 while (iter.has_next()) {
duke@435 625 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
coleenp@548 626 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
coleenp@548 627 assert(next == NULL, "Should not discover inactive Reference");
duke@435 628 if (iter.is_referent_alive()) {
duke@435 629 if (TraceReferenceGC) {
duke@435 630 gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
coleenp@548 631 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 632 }
duke@435 633 // The referent is reachable after all.
ysr@887 634 // Remove Reference object from list.
ysr@887 635 iter.remove();
duke@435 636 // Update the referent pointer as necessary: Note that this
duke@435 637 // should not entail any recursive marking because the
duke@435 638 // referent must already have been traversed.
duke@435 639 iter.make_referent_alive();
ysr@887 640 iter.move_to_next();
duke@435 641 } else {
duke@435 642 iter.next();
duke@435 643 }
duke@435 644 }
duke@435 645 NOT_PRODUCT(
duke@435 646 if (PrintGCDetails && TraceReferenceGC) {
duke@435 647 gclog_or_tty->print(" Dropped %d active Refs out of %d "
duke@435 648 "Refs in discovered list ", iter.removed(), iter.processed());
duke@435 649 }
duke@435 650 )
duke@435 651 }
duke@435 652
duke@435 653 void
coleenp@548 654 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
coleenp@548 655 BoolObjectClosure* is_alive,
coleenp@548 656 OopClosure* keep_alive,
coleenp@548 657 VoidClosure* complete_gc) {
duke@435 658 assert(!discovery_is_atomic(), "Error");
coleenp@548 659 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 660 while (iter.has_next()) {
duke@435 661 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 662 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
coleenp@548 663 oop next = java_lang_ref_Reference::next(iter.obj());
duke@435 664 if ((iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@548 665 next != NULL)) {
coleenp@548 666 assert(next->is_oop_or_null(), "bad next field");
duke@435 667 // Remove Reference object from list
duke@435 668 iter.remove();
duke@435 669 // Trace the cohorts
duke@435 670 iter.make_referent_alive();
coleenp@548 671 if (UseCompressedOops) {
coleenp@548 672 keep_alive->do_oop((narrowOop*)next_addr);
coleenp@548 673 } else {
coleenp@548 674 keep_alive->do_oop((oop*)next_addr);
coleenp@548 675 }
ysr@887 676 iter.move_to_next();
duke@435 677 } else {
duke@435 678 iter.next();
duke@435 679 }
duke@435 680 }
duke@435 681 // Now close the newly reachable set
duke@435 682 complete_gc->do_void();
duke@435 683 NOT_PRODUCT(
duke@435 684 if (PrintGCDetails && TraceReferenceGC) {
duke@435 685 gclog_or_tty->print(" Dropped %d active Refs out of %d "
duke@435 686 "Refs in discovered list ", iter.removed(), iter.processed());
duke@435 687 }
duke@435 688 )
duke@435 689 }
duke@435 690
duke@435 691 // Traverse the list and process the referents, by either
coleenp@548 692 // clearing them or keeping them (and their reachable
duke@435 693 // closure) alive.
duke@435 694 void
coleenp@548 695 ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
duke@435 696 bool clear_referent,
duke@435 697 BoolObjectClosure* is_alive,
duke@435 698 OopClosure* keep_alive,
duke@435 699 VoidClosure* complete_gc) {
coleenp@548 700 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 701 while (iter.has_next()) {
duke@435 702 iter.update_discovered();
duke@435 703 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
duke@435 704 if (clear_referent) {
duke@435 705 // NULL out referent pointer
duke@435 706 iter.clear_referent();
duke@435 707 } else {
duke@435 708 // keep the referent around
duke@435 709 iter.make_referent_alive();
duke@435 710 }
duke@435 711 if (TraceReferenceGC) {
duke@435 712 gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
duke@435 713 clear_referent ? "cleared " : "",
coleenp@548 714 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 715 }
duke@435 716 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
duke@435 717 // If discovery is concurrent, we may have objects with null referents,
duke@435 718 // being those that were concurrently cleared after they were discovered
duke@435 719 // (and not subsequently precleaned).
duke@435 720 assert( (discovery_is_atomic() && iter.referent()->is_oop())
duke@435 721 || (!discovery_is_atomic() && iter.referent()->is_oop_or_null(UseConcMarkSweepGC)),
duke@435 722 "Adding a bad referent");
duke@435 723 iter.next();
duke@435 724 }
duke@435 725 // Remember to keep sentinel pointer around
duke@435 726 iter.update_discovered();
duke@435 727 // Close the reachable set
duke@435 728 complete_gc->do_void();
duke@435 729 }
duke@435 730
duke@435 731 void
coleenp@548 732 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
coleenp@548 733 oop obj = refs_list.head();
coleenp@548 734 while (obj != sentinel_ref()) {
coleenp@548 735 oop discovered = java_lang_ref_Reference::discovered(obj);
coleenp@548 736 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
coleenp@548 737 obj = discovered;
duke@435 738 }
coleenp@548 739 refs_list.set_head(sentinel_ref());
coleenp@548 740 refs_list.set_length(0);
duke@435 741 }
duke@435 742
ysr@777 743 void ReferenceProcessor::abandon_partial_discovery() {
ysr@777 744 // loop over the lists
ysr@777 745 for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
ysr@777 746 if (TraceReferenceGC && PrintGCDetails && ((i % _num_q) == 0)) {
ysr@777 747 gclog_or_tty->print_cr(
ysr@777 748 "\nAbandoning %s discovered list",
ysr@777 749 list_name(i));
ysr@777 750 }
ysr@777 751 abandon_partial_discovered_list(_discoveredSoftRefs[i]);
duke@435 752 }
duke@435 753 }
duke@435 754
duke@435 755 class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 756 public:
duke@435 757 RefProcPhase1Task(ReferenceProcessor& ref_processor,
duke@435 758 DiscoveredList refs_lists[],
duke@435 759 ReferencePolicy* policy,
duke@435 760 bool marks_oops_alive)
duke@435 761 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
duke@435 762 _policy(policy)
duke@435 763 { }
duke@435 764 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 765 OopClosure& keep_alive,
duke@435 766 VoidClosure& complete_gc)
duke@435 767 {
duke@435 768 _ref_processor.process_phase1(_refs_lists[i], _policy,
duke@435 769 &is_alive, &keep_alive, &complete_gc);
duke@435 770 }
duke@435 771 private:
duke@435 772 ReferencePolicy* _policy;
duke@435 773 };
duke@435 774
duke@435 775 class RefProcPhase2Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 776 public:
duke@435 777 RefProcPhase2Task(ReferenceProcessor& ref_processor,
duke@435 778 DiscoveredList refs_lists[],
duke@435 779 bool marks_oops_alive)
duke@435 780 : ProcessTask(ref_processor, refs_lists, marks_oops_alive)
duke@435 781 { }
duke@435 782 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 783 OopClosure& keep_alive,
duke@435 784 VoidClosure& complete_gc)
duke@435 785 {
duke@435 786 _ref_processor.process_phase2(_refs_lists[i],
duke@435 787 &is_alive, &keep_alive, &complete_gc);
duke@435 788 }
duke@435 789 };
duke@435 790
duke@435 791 class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 792 public:
duke@435 793 RefProcPhase3Task(ReferenceProcessor& ref_processor,
duke@435 794 DiscoveredList refs_lists[],
duke@435 795 bool clear_referent,
duke@435 796 bool marks_oops_alive)
duke@435 797 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
duke@435 798 _clear_referent(clear_referent)
duke@435 799 { }
duke@435 800 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 801 OopClosure& keep_alive,
duke@435 802 VoidClosure& complete_gc)
duke@435 803 {
duke@435 804 _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
duke@435 805 &is_alive, &keep_alive, &complete_gc);
duke@435 806 }
duke@435 807 private:
duke@435 808 bool _clear_referent;
duke@435 809 };
duke@435 810
duke@435 811 // Balances reference queues.
duke@435 812 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
duke@435 813 {
duke@435 814 // calculate total length
duke@435 815 size_t total_refs = 0;
duke@435 816 for (int i = 0; i < _num_q; ++i) {
duke@435 817 total_refs += ref_lists[i].length();
duke@435 818 }
duke@435 819 size_t avg_refs = total_refs / _num_q + 1;
duke@435 820 int to_idx = 0;
duke@435 821 for (int from_idx = 0; from_idx < _num_q; from_idx++) {
duke@435 822 while (ref_lists[from_idx].length() > avg_refs) {
duke@435 823 assert(to_idx < _num_q, "Sanity Check!");
duke@435 824 if (ref_lists[to_idx].length() < avg_refs) {
duke@435 825 // move superfluous refs
duke@435 826 size_t refs_to_move =
duke@435 827 MIN2(ref_lists[from_idx].length() - avg_refs,
duke@435 828 avg_refs - ref_lists[to_idx].length());
duke@435 829 oop move_head = ref_lists[from_idx].head();
duke@435 830 oop move_tail = move_head;
duke@435 831 oop new_head = move_head;
duke@435 832 // find an element to split the list on
duke@435 833 for (size_t j = 0; j < refs_to_move; ++j) {
duke@435 834 move_tail = new_head;
coleenp@548 835 new_head = java_lang_ref_Reference::discovered(new_head);
duke@435 836 }
duke@435 837 java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
duke@435 838 ref_lists[to_idx].set_head(move_head);
ysr@887 839 ref_lists[to_idx].inc_length(refs_to_move);
duke@435 840 ref_lists[from_idx].set_head(new_head);
ysr@887 841 ref_lists[from_idx].dec_length(refs_to_move);
duke@435 842 } else {
duke@435 843 ++to_idx;
duke@435 844 }
duke@435 845 }
duke@435 846 }
duke@435 847 }
duke@435 848
duke@435 849 void
duke@435 850 ReferenceProcessor::process_discovered_reflist(
duke@435 851 DiscoveredList refs_lists[],
duke@435 852 ReferencePolicy* policy,
duke@435 853 bool clear_referent,
duke@435 854 BoolObjectClosure* is_alive,
duke@435 855 OopClosure* keep_alive,
duke@435 856 VoidClosure* complete_gc,
duke@435 857 AbstractRefProcTaskExecutor* task_executor)
duke@435 858 {
duke@435 859 bool mt = task_executor != NULL && _processing_is_mt;
duke@435 860 if (mt && ParallelRefProcBalancingEnabled) {
duke@435 861 balance_queues(refs_lists);
duke@435 862 }
duke@435 863 if (PrintReferenceGC && PrintGCDetails) {
duke@435 864 size_t total = 0;
duke@435 865 for (int i = 0; i < _num_q; ++i) {
duke@435 866 total += refs_lists[i].length();
duke@435 867 }
duke@435 868 gclog_or_tty->print(", %u refs", total);
duke@435 869 }
duke@435 870
duke@435 871 // Phase 1 (soft refs only):
duke@435 872 // . Traverse the list and remove any SoftReferences whose
duke@435 873 // referents are not alive, but that should be kept alive for
duke@435 874 // policy reasons. Keep alive the transitive closure of all
duke@435 875 // such referents.
duke@435 876 if (policy != NULL) {
duke@435 877 if (mt) {
duke@435 878 RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/);
duke@435 879 task_executor->execute(phase1);
duke@435 880 } else {
duke@435 881 for (int i = 0; i < _num_q; i++) {
duke@435 882 process_phase1(refs_lists[i], policy,
duke@435 883 is_alive, keep_alive, complete_gc);
duke@435 884 }
duke@435 885 }
duke@435 886 } else { // policy == NULL
duke@435 887 assert(refs_lists != _discoveredSoftRefs,
duke@435 888 "Policy must be specified for soft references.");
duke@435 889 }
duke@435 890
duke@435 891 // Phase 2:
duke@435 892 // . Traverse the list and remove any refs whose referents are alive.
duke@435 893 if (mt) {
duke@435 894 RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/);
duke@435 895 task_executor->execute(phase2);
duke@435 896 } else {
duke@435 897 for (int i = 0; i < _num_q; i++) {
duke@435 898 process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
duke@435 899 }
duke@435 900 }
duke@435 901
duke@435 902 // Phase 3:
duke@435 903 // . Traverse the list and process referents as appropriate.
duke@435 904 if (mt) {
duke@435 905 RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/);
duke@435 906 task_executor->execute(phase3);
duke@435 907 } else {
duke@435 908 for (int i = 0; i < _num_q; i++) {
duke@435 909 process_phase3(refs_lists[i], clear_referent,
duke@435 910 is_alive, keep_alive, complete_gc);
duke@435 911 }
duke@435 912 }
duke@435 913 }
duke@435 914
duke@435 915 void ReferenceProcessor::clean_up_discovered_references() {
duke@435 916 // loop over the lists
duke@435 917 for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
duke@435 918 if (TraceReferenceGC && PrintGCDetails && ((i % _num_q) == 0)) {
duke@435 919 gclog_or_tty->print_cr(
duke@435 920 "\nScrubbing %s discovered list of Null referents",
duke@435 921 list_name(i));
duke@435 922 }
duke@435 923 clean_up_discovered_reflist(_discoveredSoftRefs[i]);
duke@435 924 }
duke@435 925 }
duke@435 926
duke@435 927 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
duke@435 928 assert(!discovery_is_atomic(), "Else why call this method?");
duke@435 929 DiscoveredListIterator iter(refs_list, NULL, NULL);
duke@435 930 while (iter.has_next()) {
duke@435 931 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 932 oop next = java_lang_ref_Reference::next(iter.obj());
coleenp@548 933 assert(next->is_oop_or_null(), "bad next field");
duke@435 934 // If referent has been cleared or Reference is not active,
duke@435 935 // drop it.
coleenp@548 936 if (iter.referent() == NULL || next != NULL) {
duke@435 937 debug_only(
duke@435 938 if (PrintGCDetails && TraceReferenceGC) {
duke@435 939 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
duke@435 940 INTPTR_FORMAT " with next field: " INTPTR_FORMAT
duke@435 941 " and referent: " INTPTR_FORMAT,
coleenp@548 942 iter.obj(), next, iter.referent());
duke@435 943 }
duke@435 944 )
duke@435 945 // Remove Reference object from list
duke@435 946 iter.remove();
ysr@887 947 iter.move_to_next();
duke@435 948 } else {
duke@435 949 iter.next();
duke@435 950 }
duke@435 951 }
duke@435 952 NOT_PRODUCT(
duke@435 953 if (PrintGCDetails && TraceReferenceGC) {
duke@435 954 gclog_or_tty->print(
duke@435 955 " Removed %d Refs with NULL referents out of %d discovered Refs",
duke@435 956 iter.removed(), iter.processed());
duke@435 957 }
duke@435 958 )
duke@435 959 }
duke@435 960
duke@435 961 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
duke@435 962 int id = 0;
duke@435 963 // Determine the queue index to use for this object.
duke@435 964 if (_discovery_is_mt) {
duke@435 965 // During a multi-threaded discovery phase,
duke@435 966 // each thread saves to its "own" list.
duke@435 967 Thread* thr = Thread::current();
duke@435 968 assert(thr->is_GC_task_thread(),
duke@435 969 "Dubious cast from Thread* to WorkerThread*?");
duke@435 970 id = ((WorkerThread*)thr)->id();
duke@435 971 } else {
duke@435 972 // single-threaded discovery, we save in round-robin
duke@435 973 // fashion to each of the lists.
duke@435 974 if (_processing_is_mt) {
duke@435 975 id = next_id();
duke@435 976 }
duke@435 977 }
duke@435 978 assert(0 <= id && id < _num_q, "Id is out-of-bounds (call Freud?)");
duke@435 979
duke@435 980 // Get the discovered queue to which we will add
duke@435 981 DiscoveredList* list = NULL;
duke@435 982 switch (rt) {
duke@435 983 case REF_OTHER:
duke@435 984 // Unknown reference type, no special treatment
duke@435 985 break;
duke@435 986 case REF_SOFT:
duke@435 987 list = &_discoveredSoftRefs[id];
duke@435 988 break;
duke@435 989 case REF_WEAK:
duke@435 990 list = &_discoveredWeakRefs[id];
duke@435 991 break;
duke@435 992 case REF_FINAL:
duke@435 993 list = &_discoveredFinalRefs[id];
duke@435 994 break;
duke@435 995 case REF_PHANTOM:
duke@435 996 list = &_discoveredPhantomRefs[id];
duke@435 997 break;
duke@435 998 case REF_NONE:
duke@435 999 // we should not reach here if we are an instanceRefKlass
duke@435 1000 default:
duke@435 1001 ShouldNotReachHere();
duke@435 1002 }
duke@435 1003 return list;
duke@435 1004 }
duke@435 1005
coleenp@548 1006 inline void
coleenp@548 1007 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
coleenp@548 1008 oop obj,
coleenp@548 1009 HeapWord* discovered_addr) {
duke@435 1010 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
duke@435 1011 // First we must make sure this object is only enqueued once. CAS in a non null
duke@435 1012 // discovered_addr.
ysr@777 1013 oop current_head = refs_list.head();
ysr@777 1014
ysr@777 1015 // Note: In the case of G1, this pre-barrier is strictly
ysr@777 1016 // not necessary because the only case we are interested in
ysr@777 1017 // here is when *discovered_addr is NULL, so this will expand to
ysr@777 1018 // nothing. As a result, I am just manually eliding it for G1.
ysr@777 1019 if (_discovered_list_needs_barrier && !UseG1GC) {
ysr@777 1020 _bs->write_ref_field_pre((void*)discovered_addr, current_head); guarantee(false, "Needs to be fixed: YSR");
ysr@777 1021 }
ysr@777 1022 oop retest = oopDesc::atomic_compare_exchange_oop(current_head, discovered_addr,
coleenp@548 1023 NULL);
duke@435 1024 if (retest == NULL) {
duke@435 1025 // This thread just won the right to enqueue the object.
duke@435 1026 // We have separate lists for enqueueing so no synchronization
duke@435 1027 // is necessary.
coleenp@548 1028 refs_list.set_head(obj);
ysr@887 1029 refs_list.inc_length(1);
ysr@777 1030 if (_discovered_list_needs_barrier) {
ysr@777 1031 _bs->write_ref_field((void*)discovered_addr, current_head); guarantee(false, "Needs to be fixed: YSR");
ysr@777 1032 }
ysr@777 1033
duke@435 1034 } else {
duke@435 1035 // If retest was non NULL, another thread beat us to it:
duke@435 1036 // The reference has already been discovered...
duke@435 1037 if (TraceReferenceGC) {
duke@435 1038 gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)",
duke@435 1039 obj, obj->blueprint()->internal_name());
duke@435 1040 }
duke@435 1041 }
duke@435 1042 }
duke@435 1043
duke@435 1044 // We mention two of several possible choices here:
duke@435 1045 // #0: if the reference object is not in the "originating generation"
duke@435 1046 // (or part of the heap being collected, indicated by our "span"),
duke@435 1047 // we don't treat it specially (i.e. we scan it as we would
duke@435 1048 // a normal oop, treating its references as strong references).
duke@435 1049 // This means that references can't be enqueued unless their
duke@435 1050 // referent is also in the same span. This is the simplest,
duke@435 1051 // most "local" and most conservative approach, albeit one
duke@435 1052 // that may cause weak references to be enqueued least promptly.
duke@435 1053 // We call this choice the "ReferenceBasedDiscovery" policy.
duke@435 1054 // #1: the reference object may be in any generation (span), but if
duke@435 1055 // the referent is in the generation (span) being currently collected
duke@435 1056 // then we can discover the reference object, provided
duke@435 1057 // the object has not already been discovered by
duke@435 1058 // a different concurrently running collector (as may be the
duke@435 1059 // case, for instance, if the reference object is in CMS and
duke@435 1060 // the referent in DefNewGeneration), and provided the processing
duke@435 1061 // of this reference object by the current collector will
duke@435 1062 // appear atomic to every other collector in the system.
duke@435 1063 // (Thus, for instance, a concurrent collector may not
duke@435 1064 // discover references in other generations even if the
duke@435 1065 // referent is in its own generation). This policy may,
duke@435 1066 // in certain cases, enqueue references somewhat sooner than
duke@435 1067 // might Policy #0 above, but at marginally increased cost
duke@435 1068 // and complexity in processing these references.
duke@435 1069 // We call this choice the "RefeferentBasedDiscovery" policy.
duke@435 1070 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
duke@435 1071 // We enqueue references only if we are discovering refs
duke@435 1072 // (rather than processing discovered refs).
duke@435 1073 if (!_discovering_refs || !RegisterReferences) {
duke@435 1074 return false;
duke@435 1075 }
duke@435 1076 // We only enqueue active references.
coleenp@548 1077 oop next = java_lang_ref_Reference::next(obj);
coleenp@548 1078 if (next != NULL) {
duke@435 1079 return false;
duke@435 1080 }
duke@435 1081
duke@435 1082 HeapWord* obj_addr = (HeapWord*)obj;
duke@435 1083 if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
duke@435 1084 !_span.contains(obj_addr)) {
duke@435 1085 // Reference is not in the originating generation;
duke@435 1086 // don't treat it specially (i.e. we want to scan it as a normal
duke@435 1087 // object with strong references).
duke@435 1088 return false;
duke@435 1089 }
duke@435 1090
duke@435 1091 // We only enqueue references whose referents are not (yet) strongly
duke@435 1092 // reachable.
duke@435 1093 if (is_alive_non_header() != NULL) {
duke@435 1094 oop referent = java_lang_ref_Reference::referent(obj);
duke@435 1095 // We'd like to assert the following:
duke@435 1096 // assert(referent != NULL, "Refs with null referents already filtered");
duke@435 1097 // However, since this code may be executed concurrently with
duke@435 1098 // mutators, which can clear() the referent, it is not
duke@435 1099 // guaranteed that the referent is non-NULL.
duke@435 1100 if (is_alive_non_header()->do_object_b(referent)) {
duke@435 1101 return false; // referent is reachable
duke@435 1102 }
duke@435 1103 }
duke@435 1104
ysr@777 1105 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
ysr@777 1106 const oop discovered = java_lang_ref_Reference::discovered(obj);
coleenp@548 1107 assert(discovered->is_oop_or_null(), "bad discovered field");
coleenp@548 1108 if (discovered != NULL) {
duke@435 1109 // The reference has already been discovered...
duke@435 1110 if (TraceReferenceGC) {
duke@435 1111 gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)",
coleenp@548 1112 obj, obj->blueprint()->internal_name());
duke@435 1113 }
duke@435 1114 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
duke@435 1115 // assumes that an object is not processed twice;
duke@435 1116 // if it's been already discovered it must be on another
duke@435 1117 // generation's discovered list; so we won't discover it.
duke@435 1118 return false;
duke@435 1119 } else {
duke@435 1120 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
duke@435 1121 "Unrecognized policy");
duke@435 1122 // Check assumption that an object is not potentially
duke@435 1123 // discovered twice except by concurrent collectors that potentially
duke@435 1124 // trace the same Reference object twice.
duke@435 1125 assert(UseConcMarkSweepGC,
ysr@777 1126 "Only possible with an incremental-update concurrent collector");
duke@435 1127 return true;
duke@435 1128 }
duke@435 1129 }
duke@435 1130
duke@435 1131 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
duke@435 1132 oop referent = java_lang_ref_Reference::referent(obj);
duke@435 1133 assert(referent->is_oop(), "bad referent");
duke@435 1134 // enqueue if and only if either:
duke@435 1135 // reference is in our span or
duke@435 1136 // we are an atomic collector and referent is in our span
duke@435 1137 if (_span.contains(obj_addr) ||
duke@435 1138 (discovery_is_atomic() && _span.contains(referent))) {
duke@435 1139 // should_enqueue = true;
duke@435 1140 } else {
duke@435 1141 return false;
duke@435 1142 }
duke@435 1143 } else {
duke@435 1144 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
duke@435 1145 _span.contains(obj_addr), "code inconsistency");
duke@435 1146 }
duke@435 1147
duke@435 1148 // Get the right type of discovered queue head.
duke@435 1149 DiscoveredList* list = get_discovered_list(rt);
duke@435 1150 if (list == NULL) {
duke@435 1151 return false; // nothing special needs to be done
duke@435 1152 }
duke@435 1153
duke@435 1154 if (_discovery_is_mt) {
duke@435 1155 add_to_discovered_list_mt(*list, obj, discovered_addr);
duke@435 1156 } else {
ysr@777 1157 // If "_discovered_list_needs_barrier", we do write barriers when
ysr@777 1158 // updating the discovered reference list. Otherwise, we do a raw store
ysr@777 1159 // here: the field will be visited later when processing the discovered
ysr@777 1160 // references.
ysr@777 1161 oop current_head = list->head();
ysr@777 1162 // As in the case further above, since we are over-writing a NULL
ysr@777 1163 // pre-value, we can safely elide the pre-barrier here for the case of G1.
ysr@777 1164 assert(discovered == NULL, "control point invariant");
ysr@777 1165 if (_discovered_list_needs_barrier && !UseG1GC) { // safe to elide for G1
ysr@777 1166 _bs->write_ref_field_pre((oop*)discovered_addr, current_head);
ysr@777 1167 }
ysr@777 1168 oop_store_raw(discovered_addr, current_head);
ysr@777 1169 if (_discovered_list_needs_barrier) {
ysr@777 1170 _bs->write_ref_field((oop*)discovered_addr, current_head);
ysr@777 1171 }
duke@435 1172 list->set_head(obj);
ysr@887 1173 list->inc_length(1);
duke@435 1174 }
duke@435 1175
duke@435 1176 // In the MT discovery case, it is currently possible to see
duke@435 1177 // the following message multiple times if several threads
duke@435 1178 // discover a reference about the same time. Only one will
duke@435 1179 // however have actually added it to the discovered queue.
duke@435 1180 // One could let add_to_discovered_list_mt() return an
duke@435 1181 // indication for success in queueing (by 1 thread) or
duke@435 1182 // failure (by all other threads), but I decided the extra
duke@435 1183 // code was not worth the effort for something that is
duke@435 1184 // only used for debugging support.
duke@435 1185 if (TraceReferenceGC) {
duke@435 1186 oop referent = java_lang_ref_Reference::referent(obj);
duke@435 1187 if (PrintGCDetails) {
duke@435 1188 gclog_or_tty->print_cr("Enqueued reference (" INTPTR_FORMAT ": %s)",
coleenp@548 1189 obj, obj->blueprint()->internal_name());
duke@435 1190 }
duke@435 1191 assert(referent->is_oop(), "Enqueued a bad referent");
duke@435 1192 }
duke@435 1193 assert(obj->is_oop(), "Enqueued a bad reference");
duke@435 1194 return true;
duke@435 1195 }
duke@435 1196
duke@435 1197 // Preclean the discovered references by removing those
duke@435 1198 // whose referents are alive, and by marking from those that
duke@435 1199 // are not active. These lists can be handled here
duke@435 1200 // in any order and, indeed, concurrently.
duke@435 1201 void ReferenceProcessor::preclean_discovered_references(
duke@435 1202 BoolObjectClosure* is_alive,
duke@435 1203 OopClosure* keep_alive,
duke@435 1204 VoidClosure* complete_gc,
duke@435 1205 YieldClosure* yield) {
duke@435 1206
duke@435 1207 NOT_PRODUCT(verify_ok_to_handle_reflists());
duke@435 1208
duke@435 1209 // Soft references
duke@435 1210 {
duke@435 1211 TraceTime tt("Preclean SoftReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1212 false, gclog_or_tty);
duke@435 1213 for (int i = 0; i < _num_q; i++) {
ysr@887 1214 if (yield->should_return()) {
ysr@887 1215 return;
ysr@887 1216 }
duke@435 1217 preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
duke@435 1218 keep_alive, complete_gc, yield);
duke@435 1219 }
duke@435 1220 }
duke@435 1221
duke@435 1222 // Weak references
duke@435 1223 {
duke@435 1224 TraceTime tt("Preclean WeakReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1225 false, gclog_or_tty);
duke@435 1226 for (int i = 0; i < _num_q; i++) {
ysr@887 1227 if (yield->should_return()) {
ysr@887 1228 return;
ysr@887 1229 }
duke@435 1230 preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
duke@435 1231 keep_alive, complete_gc, yield);
duke@435 1232 }
duke@435 1233 }
duke@435 1234
duke@435 1235 // Final references
duke@435 1236 {
duke@435 1237 TraceTime tt("Preclean FinalReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1238 false, gclog_or_tty);
duke@435 1239 for (int i = 0; i < _num_q; i++) {
ysr@887 1240 if (yield->should_return()) {
ysr@887 1241 return;
ysr@887 1242 }
duke@435 1243 preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
duke@435 1244 keep_alive, complete_gc, yield);
duke@435 1245 }
duke@435 1246 }
duke@435 1247
duke@435 1248 // Phantom references
duke@435 1249 {
duke@435 1250 TraceTime tt("Preclean PhantomReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1251 false, gclog_or_tty);
duke@435 1252 for (int i = 0; i < _num_q; i++) {
ysr@887 1253 if (yield->should_return()) {
ysr@887 1254 return;
ysr@887 1255 }
duke@435 1256 preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
duke@435 1257 keep_alive, complete_gc, yield);
duke@435 1258 }
duke@435 1259 }
duke@435 1260 }
duke@435 1261
duke@435 1262 // Walk the given discovered ref list, and remove all reference objects
duke@435 1263 // whose referents are still alive, whose referents are NULL or which
ysr@887 1264 // are not active (have a non-NULL next field). NOTE: When we are
ysr@887 1265 // thus precleaning the ref lists (which happens single-threaded today),
ysr@887 1266 // we do not disable refs discovery to honour the correct semantics of
ysr@887 1267 // java.lang.Reference. As a result, we need to be careful below
ysr@887 1268 // that ref removal steps interleave safely with ref discovery steps
ysr@887 1269 // (in this thread).
coleenp@548 1270 void
coleenp@548 1271 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
coleenp@548 1272 BoolObjectClosure* is_alive,
coleenp@548 1273 OopClosure* keep_alive,
coleenp@548 1274 VoidClosure* complete_gc,
coleenp@548 1275 YieldClosure* yield) {
duke@435 1276 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 1277 while (iter.has_next()) {
duke@435 1278 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 1279 oop obj = iter.obj();
coleenp@548 1280 oop next = java_lang_ref_Reference::next(obj);
duke@435 1281 if (iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@548 1282 next != NULL) {
duke@435 1283 // The referent has been cleared, or is alive, or the Reference is not
duke@435 1284 // active; we need to trace and mark its cohort.
duke@435 1285 if (TraceReferenceGC) {
duke@435 1286 gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
duke@435 1287 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 1288 }
duke@435 1289 // Remove Reference object from list
duke@435 1290 iter.remove();
duke@435 1291 // Keep alive its cohort.
duke@435 1292 iter.make_referent_alive();
coleenp@548 1293 if (UseCompressedOops) {
coleenp@548 1294 narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 1295 keep_alive->do_oop(next_addr);
coleenp@548 1296 } else {
coleenp@548 1297 oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 1298 keep_alive->do_oop(next_addr);
coleenp@548 1299 }
ysr@887 1300 iter.move_to_next();
duke@435 1301 } else {
duke@435 1302 iter.next();
duke@435 1303 }
duke@435 1304 }
duke@435 1305 // Close the reachable set
duke@435 1306 complete_gc->do_void();
duke@435 1307
duke@435 1308 NOT_PRODUCT(
duke@435 1309 if (PrintGCDetails && PrintReferenceGC) {
duke@435 1310 gclog_or_tty->print(" Dropped %d Refs out of %d "
duke@435 1311 "Refs in discovered list ", iter.removed(), iter.processed());
duke@435 1312 }
duke@435 1313 )
duke@435 1314 }
duke@435 1315
duke@435 1316 const char* ReferenceProcessor::list_name(int i) {
duke@435 1317 assert(i >= 0 && i < _num_q * subclasses_of_ref, "Out of bounds index");
duke@435 1318 int j = i / _num_q;
duke@435 1319 switch (j) {
duke@435 1320 case 0: return "SoftRef";
duke@435 1321 case 1: return "WeakRef";
duke@435 1322 case 2: return "FinalRef";
duke@435 1323 case 3: return "PhantomRef";
duke@435 1324 }
duke@435 1325 ShouldNotReachHere();
duke@435 1326 return NULL;
duke@435 1327 }
duke@435 1328
duke@435 1329 #ifndef PRODUCT
duke@435 1330 void ReferenceProcessor::verify_ok_to_handle_reflists() {
duke@435 1331 // empty for now
duke@435 1332 }
duke@435 1333 #endif
duke@435 1334
duke@435 1335 void ReferenceProcessor::verify() {
coleenp@548 1336 guarantee(sentinel_ref() != NULL && sentinel_ref()->is_oop(), "Lost _sentinelRef");
duke@435 1337 }
duke@435 1338
duke@435 1339 #ifndef PRODUCT
duke@435 1340 void ReferenceProcessor::clear_discovered_references() {
duke@435 1341 guarantee(!_discovering_refs, "Discovering refs?");
duke@435 1342 for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
duke@435 1343 oop obj = _discoveredSoftRefs[i].head();
coleenp@548 1344 while (obj != sentinel_ref()) {
duke@435 1345 oop next = java_lang_ref_Reference::discovered(obj);
duke@435 1346 java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
duke@435 1347 obj = next;
duke@435 1348 }
coleenp@548 1349 _discoveredSoftRefs[i].set_head(sentinel_ref());
duke@435 1350 _discoveredSoftRefs[i].set_length(0);
duke@435 1351 }
duke@435 1352 }
duke@435 1353 #endif // PRODUCT
