src/share/vm/memory/referenceProcessor.cpp

author:      ysr
date:        Wed, 07 Sep 2011 13:55:42 -0700
changeset:   3117:eca1193ca245
parent:      3115:c2bf0120ee5d
child:       3175:4dfb2df418f2
permissions: -rw-r--r--

4965777: GC changes to support use of discovered field for pending references
Summary: If and when the reference handler thread is able to use the discovered field to link reference objects in its pending list, so will GC. In that case, GC will scan through this field once a reference object has been placed on the pending list, but will not scan it before that stage, because until then the field is used by the concurrent GC thread to link discovered objects. When the ReferenceHandler thread does not use the discovered field to link the elements of the pending list, as is the case in older JDKs, the JVM falls back to the old behaviour of using the next field for that purpose.
Reviewed-by: jcoomes, mchung, stefank
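
In code terms, a minimal sketch of the resulting dispatch (names as they appear in the file below; branch bodies paraphrased):

    if (ReferenceProcessor::pending_list_uses_discovered_field()) {
      // New behaviour: GC links pending references through the
      // java.lang.ref.Reference.discovered field and self-loops "next".
    } else {
      // Old behaviour (older JDKs): GC links through the "next" field
      // and clears "discovered".
    }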

duke@435 1 /*
ysr@2651 2 * Copyright (c) 2001, 2011, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
stefank@2314 26 #include "classfile/javaClasses.hpp"
stefank@2314 27 #include "classfile/systemDictionary.hpp"
stefank@2314 28 #include "gc_interface/collectedHeap.hpp"
stefank@2314 29 #include "gc_interface/collectedHeap.inline.hpp"
stefank@2314 30 #include "memory/referencePolicy.hpp"
stefank@2314 31 #include "memory/referenceProcessor.hpp"
stefank@2314 32 #include "oops/oop.inline.hpp"
stefank@2314 33 #include "runtime/java.hpp"
stefank@2314 34 #include "runtime/jniHandles.hpp"
duke@435 35
ysr@888 36 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
ysr@888 37 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL;
ysr@888 38 const int subclasses_of_ref = REF_PHANTOM - REF_OTHER;
ysr@3117 39 bool ReferenceProcessor::_pending_list_uses_discovered_field = false;
ysr@888 40
duke@435 41 // List of discovered references.
duke@435 42 class DiscoveredList {
duke@435 43 public:
coleenp@548 44 DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
coleenp@548 45 oop head() const {
stefank@3115 46 return UseCompressedOops ? oopDesc::decode_heap_oop(_compressed_head) :
coleenp@548 47 _oop_head;
coleenp@548 48 }
coleenp@548 49 HeapWord* adr_head() {
coleenp@548 50 return UseCompressedOops ? (HeapWord*)&_compressed_head :
coleenp@548 51 (HeapWord*)&_oop_head;
coleenp@548 52 }
coleenp@548 53 void set_head(oop o) {
coleenp@548 54 if (UseCompressedOops) {
coleenp@548 55 // Must compress the head ptr.
stefank@3115 56 _compressed_head = oopDesc::encode_heap_oop(o);
coleenp@548 57 } else {
coleenp@548 58 _oop_head = o;
coleenp@548 59 }
coleenp@548 60 }
stefank@3115 61 bool empty() const { return head() == NULL; }
duke@435 62 size_t length() { return _len; }
ysr@887 63 void set_length(size_t len) { _len = len; }
ysr@887 64 void inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
ysr@887 65 void dec_length(size_t dec) { _len -= dec; }
duke@435 66 private:
coleenp@548 67 // Set value depending on UseCompressedOops. This could be a template class
coleenp@548 68 // but then we have to fix all the instantiations and declarations that use this class.
coleenp@548 69 oop _oop_head;
coleenp@548 70 narrowOop _compressed_head;
duke@435 71 size_t _len;
duke@435 72 };
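// Usage sketch (mirrors the loops below): DiscoveredList hides the oop
// width, so callers simply write
//   list.set_head(obj); list.inc_length(1);
// and read the head back with list.head(), whether or not
// UseCompressedOops is set.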
duke@435 73
duke@435 74 void referenceProcessor_init() {
duke@435 75 ReferenceProcessor::init_statics();
duke@435 76 }
duke@435 77
duke@435 78 void ReferenceProcessor::init_statics() {
duke@435 79 // Initialize the master soft ref clock.
duke@435 80 java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());
duke@435 81
ysr@888 82 _always_clear_soft_ref_policy = new AlwaysClearPolicy();
ysr@888 83 _default_soft_ref_policy = new COMPILER2_PRESENT(LRUMaxHeapPolicy())
ysr@888 84 NOT_COMPILER2(LRUCurrentHeapPolicy());
ysr@888 85 if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) {
ysr@888 86 vm_exit_during_initialization("Could not allocate reference policy object");
ysr@888 87 }
duke@435 88 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
duke@435 89 RefDiscoveryPolicy == ReferentBasedDiscovery,
duke@435 90 "Unrecongnized RefDiscoveryPolicy");
ysr@3117 91 _pending_list_uses_discovered_field = JDK_Version::current().pending_list_uses_discovered_field();
duke@435 92 }
duke@435 93
duke@435 94 ReferenceProcessor::ReferenceProcessor(MemRegion span,
ysr@2651 95 bool mt_processing,
ysr@2651 96 int mt_processing_degree,
ysr@2651 97 bool mt_discovery,
ysr@2651 98 int mt_discovery_degree,
coleenp@548 99 bool atomic_discovery,
ysr@2651 100 BoolObjectClosure* is_alive_non_header,
ysr@777 101 bool discovered_list_needs_barrier) :
duke@435 102 _discovering_refs(false),
duke@435 103 _enqueuing_is_done(false),
ysr@2651 104 _is_alive_non_header(is_alive_non_header),
ysr@777 105 _discovered_list_needs_barrier(discovered_list_needs_barrier),
ysr@777 106 _bs(NULL),
duke@435 107 _processing_is_mt(mt_processing),
duke@435 108 _next_id(0)
duke@435 109 {
duke@435 110 _span = span;
duke@435 111 _discovery_is_atomic = atomic_discovery;
duke@435 112 _discovery_is_mt = mt_discovery;
ysr@2651 113 _num_q = MAX2(1, mt_processing_degree);
ysr@2651 114 _max_num_q = MAX2(_num_q, mt_discovery_degree);
jmasa@2188 115 _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref);
duke@435 116 if (_discoveredSoftRefs == NULL) {
duke@435 117 vm_exit_during_initialization("Could not allocate RefProc Array");
duke@435 118 }
jmasa@2188 119 _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q];
jmasa@2188 120 _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_q];
jmasa@2188 121 _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
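// Layout note: the four pointers above index one contiguous array, so the
// list for reference subclass i (Soft = 0 .. Phantom = 3) and queue j is
// _discoveredSoftRefs[i * _max_num_q + j]; this is why loops over all
// lists run to _max_num_q * subclasses_of_ref.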
stefank@3115 122 // Initialize all entries to NULL
jmasa@2188 123 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
stefank@3115 124 _discoveredSoftRefs[i].set_head(NULL);
duke@435 125 _discoveredSoftRefs[i].set_length(0);
duke@435 126 }
ysr@3117 127 // If we do barriers, cache a copy of the barrier set.
ysr@777 128 if (discovered_list_needs_barrier) {
ysr@777 129 _bs = Universe::heap()->barrier_set();
ysr@777 130 }
ysr@2651 131 setup_policy(false /* default soft ref policy */);
duke@435 132 }
duke@435 133
duke@435 134 #ifndef PRODUCT
duke@435 135 void ReferenceProcessor::verify_no_references_recorded() {
duke@435 136 guarantee(!_discovering_refs, "Discovering refs?");
jmasa@2188 137 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
duke@435 138 guarantee(_discoveredSoftRefs[i].empty(),
duke@435 139 "Found non-empty discovered list");
duke@435 140 }
duke@435 141 }
duke@435 142 #endif
duke@435 143
duke@435 144 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
jmasa@2188 145 // Should this instead be
jmasa@2188 146 // for (int i = 0; i < subclasses_of_ref; i++) {
jmasa@2188 147 // for (int j = 0; j < _num_q; j++) {
jmasa@2188 148 // int index = i * _max_num_q + j;
jmasa@2188 149 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
coleenp@548 150 if (UseCompressedOops) {
coleenp@548 151 f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
coleenp@548 152 } else {
coleenp@548 153 f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
coleenp@548 154 }
duke@435 155 }
duke@435 156 }
duke@435 157
coleenp@548 158 void ReferenceProcessor::update_soft_ref_master_clock() {
duke@435 159 // Update (advance) the soft ref master clock field. This must be done
duke@435 160 // after processing the soft ref list.
duke@435 161 jlong now = os::javaTimeMillis();
duke@435 162 jlong clock = java_lang_ref_SoftReference::clock();
duke@435 163 NOT_PRODUCT(
duke@435 164 if (now < clock) {
duke@435 165 warning("time warp: " INT64_FORMAT " to " INT64_FORMAT, (int64_t)clock, (int64_t)now);
duke@435 166 }
duke@435 167 )
duke@435 168 // In product mode, protect ourselves from system time being adjusted
duke@435 169 // externally and going backward; see note in the implementation of
duke@435 170 // GenCollectedHeap::time_since_last_gc() for the right way to fix
duke@435 171 // this uniformly throughout the VM; see bug-id 4741166. XXX
duke@435 172 if (now > clock) {
duke@435 173 java_lang_ref_SoftReference::set_clock(now);
duke@435 174 }
duke@435 175 // Else leave clock stalled at its old value until time progresses
duke@435 176 // past clock value.
duke@435 177 }
duke@435 178
coleenp@548 179 void ReferenceProcessor::process_discovered_references(
duke@435 180 BoolObjectClosure* is_alive,
duke@435 181 OopClosure* keep_alive,
duke@435 182 VoidClosure* complete_gc,
duke@435 183 AbstractRefProcTaskExecutor* task_executor) {
duke@435 184 NOT_PRODUCT(verify_ok_to_handle_reflists());
duke@435 185
duke@435 186 assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
duke@435 187 // Stop treating discovered references specially.
duke@435 188 disable_discovery();
duke@435 189
duke@435 190 bool trace_time = PrintGCDetails && PrintReferenceGC;
duke@435 191 // Soft references
duke@435 192 {
duke@435 193 TraceTime tt("SoftReference", trace_time, false, gclog_or_tty);
ysr@888 194 process_discovered_reflist(_discoveredSoftRefs, _current_soft_ref_policy, true,
duke@435 195 is_alive, keep_alive, complete_gc, task_executor);
duke@435 196 }
duke@435 197
duke@435 198 update_soft_ref_master_clock();
duke@435 199
duke@435 200 // Weak references
duke@435 201 {
duke@435 202 TraceTime tt("WeakReference", trace_time, false, gclog_or_tty);
duke@435 203 process_discovered_reflist(_discoveredWeakRefs, NULL, true,
duke@435 204 is_alive, keep_alive, complete_gc, task_executor);
duke@435 205 }
duke@435 206
duke@435 207 // Final references
duke@435 208 {
duke@435 209 TraceTime tt("FinalReference", trace_time, false, gclog_or_tty);
duke@435 210 process_discovered_reflist(_discoveredFinalRefs, NULL, false,
duke@435 211 is_alive, keep_alive, complete_gc, task_executor);
duke@435 212 }
duke@435 213
duke@435 214 // Phantom references
duke@435 215 {
duke@435 216 TraceTime tt("PhantomReference", trace_time, false, gclog_or_tty);
duke@435 217 process_discovered_reflist(_discoveredPhantomRefs, NULL, false,
duke@435 218 is_alive, keep_alive, complete_gc, task_executor);
duke@435 219 }
duke@435 220
duke@435 221 // Weak global JNI references. It would make more sense (semantically) to
duke@435 222 // traverse these simultaneously with the regular weak references above, but
duke@435 223 // that is not what the JDK1.2 specification requires. See #4126360. Native code can
duke@435 224 // thus use JNI weak references to circumvent the phantom references and
duke@435 225 // resurrect a "post-mortem" object.
duke@435 226 {
duke@435 227 TraceTime tt("JNI Weak Reference", trace_time, false, gclog_or_tty);
duke@435 228 if (task_executor != NULL) {
duke@435 229 task_executor->set_single_threaded_mode();
duke@435 230 }
duke@435 231 process_phaseJNI(is_alive, keep_alive, complete_gc);
duke@435 232 }
duke@435 233 }
duke@435 234
duke@435 235 #ifndef PRODUCT
duke@435 236 // Calculate the number of jni handles.
coleenp@548 237 uint ReferenceProcessor::count_jni_refs() {
duke@435 238 class AlwaysAliveClosure: public BoolObjectClosure {
duke@435 239 public:
coleenp@548 240 virtual bool do_object_b(oop obj) { return true; }
coleenp@548 241 virtual void do_object(oop obj) { assert(false, "Don't call"); }
duke@435 242 };
duke@435 243
duke@435 244 class CountHandleClosure: public OopClosure {
duke@435 245 private:
duke@435 246 int _count;
duke@435 247 public:
duke@435 248 CountHandleClosure(): _count(0) {}
coleenp@548 249 void do_oop(oop* unused) { _count++; }
coleenp@548 250 void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
duke@435 251 int count() { return _count; }
duke@435 252 };
duke@435 253 CountHandleClosure global_handle_count;
duke@435 254 AlwaysAliveClosure always_alive;
duke@435 255 JNIHandles::weak_oops_do(&always_alive, &global_handle_count);
duke@435 256 return global_handle_count.count();
duke@435 257 }
duke@435 258 #endif
duke@435 259
duke@435 260 void ReferenceProcessor::process_phaseJNI(BoolObjectClosure* is_alive,
duke@435 261 OopClosure* keep_alive,
duke@435 262 VoidClosure* complete_gc) {
duke@435 263 #ifndef PRODUCT
duke@435 264 if (PrintGCDetails && PrintReferenceGC) {
duke@435 265 unsigned int count = count_jni_refs();
duke@435 266 gclog_or_tty->print(", %u refs", count);
duke@435 267 }
duke@435 268 #endif
duke@435 269 JNIHandles::weak_oops_do(is_alive, keep_alive);
duke@435 270 complete_gc->do_void();
duke@435 271 }
duke@435 272
coleenp@548 273
coleenp@548 274 template <class T>
phh@1558 275 bool enqueue_discovered_ref_helper(ReferenceProcessor* ref,
phh@1558 276 AbstractRefProcTaskExecutor* task_executor) {
coleenp@548 277
duke@435 278 // Remember old value of pending references list
coleenp@548 279 T* pending_list_addr = (T*)java_lang_ref_Reference::pending_list_addr();
coleenp@548 280 T old_pending_list_value = *pending_list_addr;
duke@435 281
duke@435 282 // Enqueue references that are not made active again, and
duke@435 283 // clear the decks for the next collection (cycle).
coleenp@548 284 ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor);
duke@435 285 // Do the oop-check on pending_list_addr missed in
duke@435 286 // enqueue_discovered_reflist. We should probably
duke@435 287 // do a raw oop_check so that future such idempotent
duke@435 288 // oop_stores relying on the oop-check side-effect
duke@435 289 // may be elided automatically and safely without
duke@435 290 // affecting correctness.
coleenp@548 291 oop_store(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
duke@435 292
duke@435 293 // Stop treating discovered references specially.
coleenp@548 294 ref->disable_discovery();
duke@435 295
duke@435 296 // Return true if new pending references were added
duke@435 297 return old_pending_list_value != *pending_list_addr;
duke@435 298 }
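// Note: the helper above is a template so that the pending list head can
// be read and compared at the correct width; enqueue_discovered_references()
// below instantiates it for narrowOop or oop based on UseCompressedOops.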
duke@435 299
coleenp@548 300 bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
coleenp@548 301 NOT_PRODUCT(verify_ok_to_handle_reflists());
coleenp@548 302 if (UseCompressedOops) {
coleenp@548 303 return enqueue_discovered_ref_helper<narrowOop>(this, task_executor);
coleenp@548 304 } else {
coleenp@548 305 return enqueue_discovered_ref_helper<oop>(this, task_executor);
coleenp@548 306 }
coleenp@548 307 }
coleenp@548 308
duke@435 309 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
coleenp@548 310 HeapWord* pending_list_addr) {
duke@435 311 // Given a list of refs linked through the "discovered" field
ysr@3117 312 // (java.lang.ref.Reference.discovered), self-loop their "next" field
ysr@3117 313 // thus distinguishing them from active References, then
ysr@3117 314 // prepend them to the pending list.
ysr@3117 315 // BKWRD COMPATIBILITY NOTE: For older JDKs (prior to the fix for 4956777),
ysr@3117 316 // the "next" field is used to chain the pending list, not the discovered
ysr@3117 317 // field.
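// Illustrative example: given a discovered list r1 -> r2 -> r3 (with
// r3.discovered == r3 marking the end) and an old pending list head P:
// - new behaviour: r3.discovered is set to P (possibly NULL), the pending
//   list head becomes r1, and each ri.next is self-looped (ri inactive);
// - old behaviour: the chain is rebuilt through the "next" field instead
//   (r3.next == r3 if P was NULL) and each ri.discovered is cleared.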
ysr@3117 318
duke@435 319 if (TraceReferenceGC && PrintGCDetails) {
duke@435 320 gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
duke@435 321 INTPTR_FORMAT, (address)refs_list.head());
duke@435 322 }
stefank@3115 323
stefank@3115 324 oop obj = NULL;
ysr@3117 325 oop next_d = refs_list.head();
ysr@3117 326 if (pending_list_uses_discovered_field()) { // New behaviour
ysr@3117 327 // Walk down the list, self-looping the next field
ysr@3117 328 // so that the References are not considered active.
ysr@3117 329 while (obj != next_d) {
ysr@3117 330 obj = next_d;
ysr@3117 331 assert(obj->is_instanceRef(), "should be reference object");
ysr@3117 332 next_d = java_lang_ref_Reference::discovered(obj);
ysr@3117 333 if (TraceReferenceGC && PrintGCDetails) {
ysr@3117 334 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
ysr@3117 335 obj, next_d);
ysr@3117 336 }
ysr@3117 337 assert(java_lang_ref_Reference::next(obj) == NULL,
ysr@3117 338 "Reference not active; should not be discovered");
ysr@3117 339 // Self-loop next, so as to make Ref not active.
ysr@3117 340 java_lang_ref_Reference::set_next(obj, obj);
ysr@3117 341 if (next_d == obj) { // obj is last
ysr@3117 342 // Swap refs_list into pending_list_addr and
ysr@3117 343 // set obj's discovered to what we read from pending_list_addr.
ysr@3117 344 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
ysr@3117 345 // Need oop_check on pending_list_addr above;
ysr@3117 346 // see special oop-check code at the end of
ysr@3117 347 // enqueue_discovered_reflists() further below.
ysr@3117 348 java_lang_ref_Reference::set_discovered(obj, old); // old may be NULL
ysr@3117 349 }
duke@435 350 }
ysr@3117 351 } else { // Old behaviour
ysr@3117 352 // Walk down the list, copying the discovered field into
ysr@3117 353 // the next field and clearing the discovered field.
ysr@3117 354 while (obj != next_d) {
ysr@3117 355 obj = next_d;
ysr@3117 356 assert(obj->is_instanceRef(), "should be reference object");
ysr@3117 357 next_d = java_lang_ref_Reference::discovered(obj);
ysr@3117 358 if (TraceReferenceGC && PrintGCDetails) {
ysr@3117 359 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
ysr@3117 360 obj, next_d);
ysr@3117 361 }
ysr@3117 362 assert(java_lang_ref_Reference::next(obj) == NULL,
ysr@3117 363 "The reference should not be enqueued");
ysr@3117 364 if (next_d == obj) { // obj is last
ysr@3117 365 // Swap refs_list into pending_list_addr and
ysr@3117 366 // set obj's next to what we read from pending_list_addr.
ysr@3117 367 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
ysr@3117 368 // Need oop_check on pending_list_addr above;
ysr@3117 369 // see special oop-check code at the end of
ysr@3117 370 // enqueue_discovered_reflists() further below.
ysr@3117 371 if (old == NULL) {
ysr@3117 372 // obj should be made to point to itself, since
ysr@3117 373 // pending list was empty.
ysr@3117 374 java_lang_ref_Reference::set_next(obj, obj);
ysr@3117 375 } else {
ysr@3117 376 java_lang_ref_Reference::set_next(obj, old);
ysr@3117 377 }
duke@435 378 } else {
ysr@3117 379 java_lang_ref_Reference::set_next(obj, next_d);
duke@435 380 }
ysr@3117 381 java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
duke@435 382 }
duke@435 383 }
duke@435 384 }
duke@435 385
duke@435 386 // Parallel enqueue task
duke@435 387 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
duke@435 388 public:
duke@435 389 RefProcEnqueueTask(ReferenceProcessor& ref_processor,
duke@435 390 DiscoveredList discovered_refs[],
coleenp@548 391 HeapWord* pending_list_addr,
duke@435 392 int n_queues)
duke@435 393 : EnqueueTask(ref_processor, discovered_refs,
stefank@3115 394 pending_list_addr, n_queues)
duke@435 395 { }
duke@435 396
coleenp@548 397 virtual void work(unsigned int work_id) {
ysr@2651 398 assert(work_id < (unsigned int)_ref_processor.max_num_q(), "Index out-of-bounds");
duke@435 399 // Simplest first cut: static partitioning.
duke@435 400 int index = work_id;
jmasa@2188 401 // The increment on "index" must correspond to the maximum number of queues
jmasa@2188 402 // (n_queues) with which that ReferenceProcessor was created. That
jmasa@2188 403 // is because of the "clever" way the discovered references lists were
ysr@2651 404 // allocated and are indexed into.
ysr@2651 405 assert(_n_queues == (int) _ref_processor.max_num_q(), "Different number not expected");
jmasa@2188 406 for (int j = 0;
jmasa@2188 407 j < subclasses_of_ref;
jmasa@2188 408 j++, index += _n_queues) {
duke@435 409 _ref_processor.enqueue_discovered_reflist(
duke@435 410 _refs_lists[index], _pending_list_addr);
stefank@3115 411 _refs_lists[index].set_head(NULL);
duke@435 412 _refs_lists[index].set_length(0);
duke@435 413 }
duke@435 414 }
duke@435 415 };
duke@435 416
duke@435 417 // Enqueue references that are not made active again
coleenp@548 418 void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
duke@435 419 AbstractRefProcTaskExecutor* task_executor) {
duke@435 420 if (_processing_is_mt && task_executor != NULL) {
duke@435 421 // Parallel code
duke@435 422 RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
stefank@3115 423 pending_list_addr, _max_num_q);
duke@435 424 task_executor->execute(tsk);
duke@435 425 } else {
duke@435 426 // Serial code: call the parent class's implementation
jmasa@2188 427 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
duke@435 428 enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
stefank@3115 429 _discoveredSoftRefs[i].set_head(NULL);
duke@435 430 _discoveredSoftRefs[i].set_length(0);
duke@435 431 }
duke@435 432 }
duke@435 433 }
duke@435 434
duke@435 435 // Iterator for the list of discovered references.
duke@435 436 class DiscoveredListIterator {
duke@435 437 public:
duke@435 438 inline DiscoveredListIterator(DiscoveredList& refs_list,
duke@435 439 OopClosure* keep_alive,
duke@435 440 BoolObjectClosure* is_alive);
duke@435 441
duke@435 442 // End Of List.
stefank@3115 443 inline bool has_next() const { return _ref != NULL; }
duke@435 444
duke@435 445 // Get oop to the Reference object.
coleenp@548 446 inline oop obj() const { return _ref; }
duke@435 447
duke@435 448 // Get oop to the referent object.
coleenp@548 449 inline oop referent() const { return _referent; }
duke@435 450
duke@435 451 // Returns true if referent is alive.
duke@435 452 inline bool is_referent_alive() const;
duke@435 453
duke@435 454 // Loads data for the current reference.
duke@435 455 // The "allow_null_referent" argument tells us to allow for the possibility
duke@435 456 // of a NULL referent in the discovered Reference object. This typically
duke@435 457 // happens in the case of concurrent collectors that may have done the
ysr@887 458 // discovery concurrently, or interleaved, with mutator execution.
duke@435 459 inline void load_ptrs(DEBUG_ONLY(bool allow_null_referent));
duke@435 460
duke@435 461 // Move to the next discovered reference.
duke@435 462 inline void next();
duke@435 463
ysr@887 464 // Remove the current reference from the list
duke@435 465 inline void remove();
duke@435 466
duke@435 467 // Make the Reference object active again.
duke@435 468 inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }
duke@435 469
duke@435 470 // Make the referent alive.
coleenp@548 471 inline void make_referent_alive() {
coleenp@548 472 if (UseCompressedOops) {
coleenp@548 473 _keep_alive->do_oop((narrowOop*)_referent_addr);
coleenp@548 474 } else {
coleenp@548 475 _keep_alive->do_oop((oop*)_referent_addr);
coleenp@548 476 }
coleenp@548 477 }
duke@435 478
duke@435 479 // Update the discovered field.
coleenp@548 480 inline void update_discovered() {
coleenp@548 481 // First _prev_next ref actually points into DiscoveredList (gross).
coleenp@548 482 if (UseCompressedOops) {
stefank@3115 483 if (!oopDesc::is_null(*(narrowOop*)_prev_next)) {
stefank@3115 484 _keep_alive->do_oop((narrowOop*)_prev_next);
stefank@3115 485 }
coleenp@548 486 } else {
stefank@3115 487 if (!oopDesc::is_null(*(oop*)_prev_next)) {
stefank@3115 488 _keep_alive->do_oop((oop*)_prev_next);
stefank@3115 489 }
coleenp@548 490 }
coleenp@548 491 }
duke@435 492
duke@435 493 // NULL out referent pointer.
coleenp@548 494 inline void clear_referent() { oop_store_raw(_referent_addr, NULL); }
duke@435 495
duke@435 496 // Statistics
duke@435 497 NOT_PRODUCT(
duke@435 498 inline size_t processed() const { return _processed; }
duke@435 499 inline size_t removed() const { return _removed; }
duke@435 500 )
duke@435 501
duke@435 502 inline void move_to_next();
duke@435 503
duke@435 504 private:
duke@435 505 DiscoveredList& _refs_list;
coleenp@548 506 HeapWord* _prev_next;
stefank@3115 507 oop _prev;
duke@435 508 oop _ref;
coleenp@548 509 HeapWord* _discovered_addr;
duke@435 510 oop _next;
coleenp@548 511 HeapWord* _referent_addr;
duke@435 512 oop _referent;
duke@435 513 OopClosure* _keep_alive;
duke@435 514 BoolObjectClosure* _is_alive;
duke@435 515 DEBUG_ONLY(
duke@435 516 oop _first_seen; // cyclic linked list check
duke@435 517 )
duke@435 518 NOT_PRODUCT(
duke@435 519 size_t _processed;
duke@435 520 size_t _removed;
duke@435 521 )
duke@435 522 };
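// Typical use (see the process_phase*() methods below):
//   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
//   while (iter.has_next()) {
//     iter.load_ptrs(DEBUG_ONLY(allow_null_referent));
//     if (/* dropping this ref */) { iter.remove(); iter.move_to_next(); }
//     else                         { iter.next(); }
//   }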
duke@435 523
duke@435 524 inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList& refs_list,
duke@435 525 OopClosure* keep_alive,
duke@435 526 BoolObjectClosure* is_alive)
duke@435 527 : _refs_list(refs_list),
coleenp@548 528 _prev_next(refs_list.adr_head()),
stefank@3115 529 _prev(NULL),
duke@435 530 _ref(refs_list.head()),
duke@435 531 #ifdef ASSERT
duke@435 532 _first_seen(refs_list.head()),
duke@435 533 #endif
duke@435 534 #ifndef PRODUCT
duke@435 535 _processed(0),
duke@435 536 _removed(0),
duke@435 537 #endif
stefank@3115 538 _next(NULL),
duke@435 539 _keep_alive(keep_alive),
duke@435 540 _is_alive(is_alive)
duke@435 541 { }
duke@435 542
coleenp@548 543 inline bool DiscoveredListIterator::is_referent_alive() const {
duke@435 544 return _is_alive->do_object_b(_referent);
duke@435 545 }
duke@435 546
coleenp@548 547 inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
duke@435 548 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
coleenp@548 549 oop discovered = java_lang_ref_Reference::discovered(_ref);
coleenp@548 550 assert(_discovered_addr && discovered->is_oop_or_null(),
duke@435 551 "discovered field is bad");
coleenp@548 552 _next = discovered;
duke@435 553 _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
coleenp@548 554 _referent = java_lang_ref_Reference::referent(_ref);
duke@435 555 assert(Universe::heap()->is_in_reserved_or_null(_referent),
duke@435 556 "Wrong oop found in java.lang.Reference object");
duke@435 557 assert(allow_null_referent ?
duke@435 558 _referent->is_oop_or_null()
duke@435 559 : _referent->is_oop(),
duke@435 560 "bad referent");
duke@435 561 }
duke@435 562
coleenp@548 563 inline void DiscoveredListIterator::next() {
duke@435 564 _prev_next = _discovered_addr;
stefank@3115 565 _prev = _ref;
duke@435 566 move_to_next();
duke@435 567 }
duke@435 568
coleenp@548 569 inline void DiscoveredListIterator::remove() {
duke@435 570 assert(_ref->is_oop(), "Dropping a bad reference");
coleenp@548 571 oop_store_raw(_discovered_addr, NULL);
stefank@3115 572
coleenp@548 573 // First _prev_next ref actually points into DiscoveredList (gross).
stefank@3115 574 oop new_next;
stefank@3115 575 if (_next == _ref) {
stefank@3115 576 // At the end of the list, we should make _prev point to itself.
stefank@3115 577 // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
stefank@3115 578 // and _prev will be NULL.
stefank@3115 579 new_next = _prev;
stefank@3115 580 } else {
stefank@3115 581 new_next = _next;
stefank@3115 582 }
stefank@3115 583
coleenp@548 584 if (UseCompressedOops) {
coleenp@548 585 // Remove Reference object from list.
stefank@3115 586 oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
coleenp@548 587 } else {
coleenp@548 588 // Remove Reference object from list.
stefank@3115 589 oopDesc::store_heap_oop((oop*)_prev_next, new_next);
coleenp@548 590 }
duke@435 591 NOT_PRODUCT(_removed++);
ysr@887 592 _refs_list.dec_length(1);
duke@435 593 }
duke@435 594
coleenp@548 595 inline void DiscoveredListIterator::move_to_next() {
stefank@3115 596 if (_ref == _next) {
stefank@3115 597 // End of the list.
stefank@3115 598 _ref = NULL;
stefank@3115 599 } else {
stefank@3115 600 _ref = _next;
stefank@3115 601 }
duke@435 602 assert(_ref != _first_seen, "cyclic ref_list found");
duke@435 603 NOT_PRODUCT(_processed++);
duke@435 604 }
duke@435 605
duke@435 606 // NOTE: process_phase*() are largely similar, and at a high level
duke@435 607 // merely iterate over the extant list applying a predicate to
duke@435 608 // each of its elements and possibly removing that element from the
duke@435 609 // list and applying some further closures to that element.
duke@435 610 // We should consider the possibility of replacing these
duke@435 611 // process_phase*() methods by abstracting them into
duke@435 612 // a single general iterator invocation that receives appropriate
duke@435 613 // closures that accomplish this work.
duke@435 614
duke@435 615 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
duke@435 616 // referents are not alive, but that should be kept alive for policy reasons.
duke@435 617 // Keep alive the transitive closure of all such referents.
duke@435 618 void
coleenp@548 619 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
duke@435 620 ReferencePolicy* policy,
duke@435 621 BoolObjectClosure* is_alive,
duke@435 622 OopClosure* keep_alive,
duke@435 623 VoidClosure* complete_gc) {
duke@435 624 assert(policy != NULL, "Must have a non-NULL policy");
coleenp@548 625 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 626 // Decide which softly reachable refs should be kept alive.
duke@435 627 while (iter.has_next()) {
duke@435 628 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
duke@435 629 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
duke@435 630 if (referent_is_dead && !policy->should_clear_reference(iter.obj())) {
duke@435 631 if (TraceReferenceGC) {
duke@435 632 gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
coleenp@548 633 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 634 }
ysr@887 635 // Remove Reference object from list
ysr@887 636 iter.remove();
duke@435 637 // Make the Reference object active again
duke@435 638 iter.make_active();
duke@435 639 // keep the referent around
duke@435 640 iter.make_referent_alive();
ysr@887 641 iter.move_to_next();
duke@435 642 } else {
duke@435 643 iter.next();
duke@435 644 }
duke@435 645 }
duke@435 646 // Close the reachable set
duke@435 647 complete_gc->do_void();
duke@435 648 NOT_PRODUCT(
duke@435 649 if (PrintGCDetails && TraceReferenceGC) {
jmasa@2188 650 gclog_or_tty->print_cr(" Dropped %d dead Refs out of %d "
ysr@3117 651 "discovered Refs by policy, from list " INTPTR_FORMAT,
jmasa@2188 652 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 653 }
duke@435 654 )
duke@435 655 }
duke@435 656
duke@435 657 // Traverse the list and remove any Refs that are not active, or
duke@435 658 // whose referents are either alive or NULL.
duke@435 659 void
coleenp@548 660 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
duke@435 661 BoolObjectClosure* is_alive,
coleenp@548 662 OopClosure* keep_alive) {
duke@435 663 assert(discovery_is_atomic(), "Error");
coleenp@548 664 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 665 while (iter.has_next()) {
duke@435 666 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
coleenp@548 667 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
coleenp@548 668 assert(next == NULL, "Should not discover inactive Reference");
duke@435 669 if (iter.is_referent_alive()) {
duke@435 670 if (TraceReferenceGC) {
duke@435 671 gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
coleenp@548 672 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 673 }
duke@435 674 // The referent is reachable after all.
ysr@887 675 // Remove Reference object from list.
ysr@887 676 iter.remove();
duke@435 677 // Update the referent pointer as necessary: Note that this
duke@435 678 // should not entail any recursive marking because the
duke@435 679 // referent must already have been traversed.
duke@435 680 iter.make_referent_alive();
ysr@887 681 iter.move_to_next();
duke@435 682 } else {
duke@435 683 iter.next();
duke@435 684 }
duke@435 685 }
duke@435 686 NOT_PRODUCT(
ysr@2651 687 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
jmasa@2188 688 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
jmasa@2188 689 "Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 690 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 691 }
duke@435 692 )
duke@435 693 }
duke@435 694
duke@435 695 void
coleenp@548 696 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
coleenp@548 697 BoolObjectClosure* is_alive,
coleenp@548 698 OopClosure* keep_alive,
coleenp@548 699 VoidClosure* complete_gc) {
duke@435 700 assert(!discovery_is_atomic(), "Error");
coleenp@548 701 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 702 while (iter.has_next()) {
duke@435 703 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 704 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
coleenp@548 705 oop next = java_lang_ref_Reference::next(iter.obj());
duke@435 706 if ((iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@548 707 next != NULL)) {
coleenp@548 708 assert(next->is_oop_or_null(), "bad next field");
duke@435 709 // Remove Reference object from list
duke@435 710 iter.remove();
duke@435 711 // Trace the cohorts
duke@435 712 iter.make_referent_alive();
coleenp@548 713 if (UseCompressedOops) {
coleenp@548 714 keep_alive->do_oop((narrowOop*)next_addr);
coleenp@548 715 } else {
coleenp@548 716 keep_alive->do_oop((oop*)next_addr);
coleenp@548 717 }
ysr@887 718 iter.move_to_next();
duke@435 719 } else {
duke@435 720 iter.next();
duke@435 721 }
duke@435 722 }
duke@435 723 // Now close the newly reachable set
duke@435 724 complete_gc->do_void();
duke@435 725 NOT_PRODUCT(
ysr@2651 726 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
jmasa@2188 727 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
jmasa@2188 728 "Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 729 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 730 }
duke@435 731 )
duke@435 732 }
duke@435 733
duke@435 734 // Traverse the list and process the referents, by either
coleenp@548 735 // clearing them or keeping them (and their reachable
duke@435 736 // closure) alive.
duke@435 737 void
coleenp@548 738 ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
duke@435 739 bool clear_referent,
duke@435 740 BoolObjectClosure* is_alive,
duke@435 741 OopClosure* keep_alive,
duke@435 742 VoidClosure* complete_gc) {
jmasa@2188 743 ResourceMark rm;
coleenp@548 744 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 745 while (iter.has_next()) {
duke@435 746 iter.update_discovered();
duke@435 747 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
duke@435 748 if (clear_referent) {
duke@435 749 // NULL out referent pointer
duke@435 750 iter.clear_referent();
duke@435 751 } else {
duke@435 752 // keep the referent around
duke@435 753 iter.make_referent_alive();
duke@435 754 }
duke@435 755 if (TraceReferenceGC) {
duke@435 756 gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
duke@435 757 clear_referent ? "cleared " : "",
coleenp@548 758 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 759 }
duke@435 760 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
duke@435 761 iter.next();
duke@435 762 }
stefank@3115 763 // Remember to update the next pointer of the last ref.
duke@435 764 iter.update_discovered();
duke@435 765 // Close the reachable set
duke@435 766 complete_gc->do_void();
duke@435 767 }
duke@435 768
duke@435 769 void
stefank@3115 770 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
stefank@3115 771 oop obj = NULL;
stefank@3115 772 oop next = refs_list.head();
stefank@3115 773 while (next != obj) {
stefank@3115 774 obj = next;
stefank@3115 775 next = java_lang_ref_Reference::discovered(obj);
stefank@3115 776 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
stefank@3115 777 }
stefank@3115 778 refs_list.set_head(NULL);
stefank@3115 779 refs_list.set_length(0);
stefank@3115 780 }
stefank@3115 781
stefank@3115 782 void
coleenp@548 783 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
stefank@3115 784 clear_discovered_references(refs_list);
duke@435 785 }
duke@435 786
ysr@777 787 void ReferenceProcessor::abandon_partial_discovery() {
ysr@777 788 // loop over the lists
jmasa@2188 789 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
jmasa@2188 790 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
johnc@2316 791 gclog_or_tty->print_cr("\nAbandoning %s discovered list",
johnc@2316 792 list_name(i));
ysr@777 793 }
ysr@777 794 abandon_partial_discovered_list(_discoveredSoftRefs[i]);
duke@435 795 }
duke@435 796 }
duke@435 797
duke@435 798 class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 799 public:
duke@435 800 RefProcPhase1Task(ReferenceProcessor& ref_processor,
duke@435 801 DiscoveredList refs_lists[],
duke@435 802 ReferencePolicy* policy,
duke@435 803 bool marks_oops_alive)
duke@435 804 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
duke@435 805 _policy(policy)
duke@435 806 { }
duke@435 807 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 808 OopClosure& keep_alive,
duke@435 809 VoidClosure& complete_gc)
duke@435 810 {
jmasa@2188 811 Thread* thr = Thread::current();
jmasa@2188 812 int refs_list_index = ((WorkerThread*)thr)->id();
jmasa@2188 813 _ref_processor.process_phase1(_refs_lists[refs_list_index], _policy,
duke@435 814 &is_alive, &keep_alive, &complete_gc);
duke@435 815 }
duke@435 816 private:
duke@435 817 ReferencePolicy* _policy;
duke@435 818 };
duke@435 819
duke@435 820 class RefProcPhase2Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 821 public:
duke@435 822 RefProcPhase2Task(ReferenceProcessor& ref_processor,
duke@435 823 DiscoveredList refs_lists[],
duke@435 824 bool marks_oops_alive)
duke@435 825 : ProcessTask(ref_processor, refs_lists, marks_oops_alive)
duke@435 826 { }
duke@435 827 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 828 OopClosure& keep_alive,
duke@435 829 VoidClosure& complete_gc)
duke@435 830 {
duke@435 831 _ref_processor.process_phase2(_refs_lists[i],
duke@435 832 &is_alive, &keep_alive, &complete_gc);
duke@435 833 }
duke@435 834 };
duke@435 835
duke@435 836 class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 837 public:
duke@435 838 RefProcPhase3Task(ReferenceProcessor& ref_processor,
duke@435 839 DiscoveredList refs_lists[],
duke@435 840 bool clear_referent,
duke@435 841 bool marks_oops_alive)
duke@435 842 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
duke@435 843 _clear_referent(clear_referent)
duke@435 844 { }
duke@435 845 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 846 OopClosure& keep_alive,
duke@435 847 VoidClosure& complete_gc)
duke@435 848 {
jmasa@2188 849 // Don't use "refs_list_index" calculated in this way because
jmasa@2188 850 // balance_queues() has moved the Ref's into the first n queues.
jmasa@2188 851 // Thread* thr = Thread::current();
jmasa@2188 852 // int refs_list_index = ((WorkerThread*)thr)->id();
jmasa@2188 853 // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
duke@435 854 _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
duke@435 855 &is_alive, &keep_alive, &complete_gc);
duke@435 856 }
duke@435 857 private:
duke@435 858 bool _clear_referent;
duke@435 859 };
duke@435 860
duke@435 861 // Balances reference queues.
jmasa@2188 862 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
jmasa@2188 863 // queues[0, 1, ..., _num_q-1] because only the first _num_q
jmasa@2188 864 // corresponding to the active workers will be processed.
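// For example, assuming _max_num_q == 4 and _num_q == 2: queues 2 and 3
// are drained completely into queues 0 and 1, and entries are further
// redistributed until each of queues 0 and 1 holds roughly
// total_refs / _num_q references.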
duke@435 865 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
duke@435 866 {
duke@435 867 // calculate total length
duke@435 868 size_t total_refs = 0;
jmasa@2188 869 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 870 gclog_or_tty->print_cr("\nBalance ref_lists ");
jmasa@2188 871 }
jmasa@2188 872
jmasa@2188 873 for (int i = 0; i < _max_num_q; ++i) {
duke@435 874 total_refs += ref_lists[i].length();
jmasa@2188 875 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 876 gclog_or_tty->print("%d ", ref_lists[i].length());
jmasa@2188 877 }
jmasa@2188 878 }
jmasa@2188 879 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 880 gclog_or_tty->print_cr(" = %d", total_refs);
duke@435 881 }
duke@435 882 size_t avg_refs = total_refs / _num_q + 1;
duke@435 883 int to_idx = 0;
jmasa@2188 884 for (int from_idx = 0; from_idx < _max_num_q; from_idx++) {
jmasa@2188 885 bool move_all = false;
jmasa@2188 886 if (from_idx >= _num_q) {
jmasa@2188 887 move_all = ref_lists[from_idx].length() > 0;
jmasa@2188 888 }
jmasa@2188 889 while ((ref_lists[from_idx].length() > avg_refs) ||
jmasa@2188 890 move_all) {
duke@435 891 assert(to_idx < _num_q, "Sanity Check!");
duke@435 892 if (ref_lists[to_idx].length() < avg_refs) {
duke@435 893 // move superfluous refs
jmasa@2188 894 size_t refs_to_move;
jmasa@2188 895 // Move all the Ref's if the from queue will not be processed.
jmasa@2188 896 if (move_all) {
jmasa@2188 897 refs_to_move = MIN2(ref_lists[from_idx].length(),
jmasa@2188 898 avg_refs - ref_lists[to_idx].length());
jmasa@2188 899 } else {
jmasa@2188 900 refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
jmasa@2188 901 avg_refs - ref_lists[to_idx].length());
jmasa@2188 902 }
stefank@3115 903
stefank@3115 904 assert(refs_to_move > 0, "otherwise the code below will fail");
stefank@3115 905
duke@435 906 oop move_head = ref_lists[from_idx].head();
duke@435 907 oop move_tail = move_head;
duke@435 908 oop new_head = move_head;
duke@435 909 // find an element to split the list on
duke@435 910 for (size_t j = 0; j < refs_to_move; ++j) {
duke@435 911 move_tail = new_head;
coleenp@548 912 new_head = java_lang_ref_Reference::discovered(new_head);
duke@435 913 }
stefank@3115 914
stefank@3115 915 // Add the chain to the to list.
stefank@3115 916 if (ref_lists[to_idx].head() == NULL) {
stefank@3115 917 // to list is empty. Make a loop at the end.
stefank@3115 918 java_lang_ref_Reference::set_discovered(move_tail, move_tail);
stefank@3115 919 } else {
stefank@3115 920 java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
stefank@3115 921 }
duke@435 922 ref_lists[to_idx].set_head(move_head);
ysr@887 923 ref_lists[to_idx].inc_length(refs_to_move);
stefank@3115 924
stefank@3115 925 // Remove the chain from the from list.
stefank@3115 926 if (move_tail == new_head) {
stefank@3115 927 // We found the end of the from list.
stefank@3115 928 ref_lists[from_idx].set_head(NULL);
stefank@3115 929 } else {
stefank@3115 930 ref_lists[from_idx].set_head(new_head);
stefank@3115 931 }
ysr@887 932 ref_lists[from_idx].dec_length(refs_to_move);
jmasa@2188 933 if (ref_lists[from_idx].length() == 0) {
jmasa@2188 934 break;
jmasa@2188 935 }
duke@435 936 } else {
jmasa@2188 937 to_idx = (to_idx + 1) % _num_q;
duke@435 938 }
duke@435 939 }
duke@435 940 }
jmasa@2188 941 #ifdef ASSERT
jmasa@2188 942 size_t balanced_total_refs = 0;
jmasa@2188 943 for (int i = 0; i < _max_num_q; ++i) {
jmasa@2188 944 balanced_total_refs += ref_lists[i].length();
jmasa@2188 945 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 946 gclog_or_tty->print("%d ", ref_lists[i].length());
jmasa@2188 947 }
jmasa@2188 948 }
jmasa@2188 949 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 950 gclog_or_tty->print_cr(" = %d", balanced_total_refs);
jmasa@2188 951 gclog_or_tty->flush();
jmasa@2188 952 }
jmasa@2188 953 assert(total_refs == balanced_total_refs, "Balancing was incomplete");
jmasa@2188 954 #endif
jmasa@2188 955 }
jmasa@2188 956
jmasa@2188 957 void ReferenceProcessor::balance_all_queues() {
jmasa@2188 958 balance_queues(_discoveredSoftRefs);
jmasa@2188 959 balance_queues(_discoveredWeakRefs);
jmasa@2188 960 balance_queues(_discoveredFinalRefs);
jmasa@2188 961 balance_queues(_discoveredPhantomRefs);
duke@435 962 }
duke@435 963
duke@435 964 void
duke@435 965 ReferenceProcessor::process_discovered_reflist(
duke@435 966 DiscoveredList refs_lists[],
duke@435 967 ReferencePolicy* policy,
duke@435 968 bool clear_referent,
duke@435 969 BoolObjectClosure* is_alive,
duke@435 970 OopClosure* keep_alive,
duke@435 971 VoidClosure* complete_gc,
duke@435 972 AbstractRefProcTaskExecutor* task_executor)
duke@435 973 {
jmasa@2188 974 bool mt_processing = task_executor != NULL && _processing_is_mt;
jmasa@2188 975 // If discovery used MT and a dynamic number of GC threads, then
jmasa@2188 976 // the queues must be balanced for correctness if fewer than the
jmasa@2188 977 // maximum number of queues were used. The number of queues used
jmasa@2188 978 // during discovery may be different from the number to be used
jmasa@2188 979 // for processing so don't depend on _num_q < _max_num_q as part
jmasa@2188 980 // of the test.
jmasa@2188 981 bool must_balance = _discovery_is_mt;
jmasa@2188 982
jmasa@2188 983 if ((mt_processing && ParallelRefProcBalancingEnabled) ||
jmasa@2188 984 must_balance) {
duke@435 985 balance_queues(refs_lists);
duke@435 986 }
duke@435 987 if (PrintReferenceGC && PrintGCDetails) {
duke@435 988 size_t total = 0;
ysr@2651 989 for (int i = 0; i < _max_num_q; ++i) {
duke@435 990 total += refs_lists[i].length();
duke@435 991 }
duke@435 992 gclog_or_tty->print(", %u refs", total);
duke@435 993 }
duke@435 994
duke@435 995 // Phase 1 (soft refs only):
duke@435 996 // . Traverse the list and remove any SoftReferences whose
duke@435 997 // referents are not alive, but that should be kept alive for
duke@435 998 // policy reasons. Keep alive the transitive closure of all
duke@435 999 // such referents.
duke@435 1000 if (policy != NULL) {
jmasa@2188 1001 if (mt_processing) {
duke@435 1002 RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/);
duke@435 1003 task_executor->execute(phase1);
duke@435 1004 } else {
ysr@2651 1005 for (int i = 0; i < _max_num_q; i++) {
duke@435 1006 process_phase1(refs_lists[i], policy,
duke@435 1007 is_alive, keep_alive, complete_gc);
duke@435 1008 }
duke@435 1009 }
duke@435 1010 } else { // policy == NULL
duke@435 1011 assert(refs_lists != _discoveredSoftRefs,
duke@435 1012 "Policy must be specified for soft references.");
duke@435 1013 }
duke@435 1014
duke@435 1015 // Phase 2:
duke@435 1016 // . Traverse the list and remove any refs whose referents are alive.
jmasa@2188 1017 if (mt_processing) {
duke@435 1018 RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/);
duke@435 1019 task_executor->execute(phase2);
duke@435 1020 } else {
ysr@2651 1021 for (int i = 0; i < _max_num_q; i++) {
duke@435 1022 process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
duke@435 1023 }
duke@435 1024 }
duke@435 1025
duke@435 1026 // Phase 3:
duke@435 1027 // . Traverse the list and process referents as appropriate.
jmasa@2188 1028 if (mt_processing) {
duke@435 1029 RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/);
duke@435 1030 task_executor->execute(phase3);
duke@435 1031 } else {
ysr@2651 1032 for (int i = 0; i < _max_num_q; i++) {
duke@435 1033 process_phase3(refs_lists[i], clear_referent,
duke@435 1034 is_alive, keep_alive, complete_gc);
duke@435 1035 }
duke@435 1036 }
duke@435 1037 }
duke@435 1038
duke@435 1039 void ReferenceProcessor::clean_up_discovered_references() {
duke@435 1040 // loop over the lists
jmasa@2188 1041 // Should this instead be
jmasa@2188 1042 // for (int i = 0; i < subclasses_of_ref; i++) {
jmasa@2188 1043 // for (int j = 0; j < _num_q; j++) {
jmasa@2188 1044 // int index = i * _max_num_q + j;
jmasa@2188 1045 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
ysr@2651 1046 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
duke@435 1047 gclog_or_tty->print_cr(
duke@435 1048 "\nScrubbing %s discovered list of Null referents",
duke@435 1049 list_name(i));
duke@435 1050 }
duke@435 1051 clean_up_discovered_reflist(_discoveredSoftRefs[i]);
duke@435 1052 }
duke@435 1053 }
duke@435 1054
duke@435 1055 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
duke@435 1056 assert(!discovery_is_atomic(), "Else why call this method?");
duke@435 1057 DiscoveredListIterator iter(refs_list, NULL, NULL);
duke@435 1058 while (iter.has_next()) {
duke@435 1059 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 1060 oop next = java_lang_ref_Reference::next(iter.obj());
coleenp@548 1061 assert(next->is_oop_or_null(), "bad next field");
duke@435 1062 // If referent has been cleared or Reference is not active,
duke@435 1063 // drop it.
coleenp@548 1064 if (iter.referent() == NULL || next != NULL) {
duke@435 1065 debug_only(
duke@435 1066 if (PrintGCDetails && TraceReferenceGC) {
duke@435 1067 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
duke@435 1068 INTPTR_FORMAT " with next field: " INTPTR_FORMAT
duke@435 1069 " and referent: " INTPTR_FORMAT,
coleenp@548 1070 iter.obj(), next, iter.referent());
duke@435 1071 }
duke@435 1072 )
duke@435 1073 // Remove Reference object from list
duke@435 1074 iter.remove();
ysr@887 1075 iter.move_to_next();
duke@435 1076 } else {
duke@435 1077 iter.next();
duke@435 1078 }
duke@435 1079 }
duke@435 1080 NOT_PRODUCT(
duke@435 1081 if (PrintGCDetails && TraceReferenceGC) {
duke@435 1082 gclog_or_tty->print(
duke@435 1083 " Removed %d Refs with NULL referents out of %d discovered Refs",
duke@435 1084 iter.removed(), iter.processed());
duke@435 1085 }
duke@435 1086 )
duke@435 1087 }
duke@435 1088
duke@435 1089 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
duke@435 1090 int id = 0;
duke@435 1091 // Determine the queue index to use for this object.
duke@435 1092 if (_discovery_is_mt) {
duke@435 1093 // During a multi-threaded discovery phase,
duke@435 1094 // each thread saves to its "own" list.
duke@435 1095 Thread* thr = Thread::current();
johnc@2316 1096 id = thr->as_Worker_thread()->id();
duke@435 1097 } else {
duke@435 1098 // single-threaded discovery, we save in round-robin
duke@435 1099 // fashion to each of the lists.
duke@435 1100 if (_processing_is_mt) {
duke@435 1101 id = next_id();
duke@435 1102 }
duke@435 1103 }
jmasa@2188 1104 assert(0 <= id && id < _max_num_q, "Id is out-of-bounds (call Freud?)");
duke@435 1105
duke@435 1106 // Get the discovered queue to which we will add
duke@435 1107 DiscoveredList* list = NULL;
duke@435 1108 switch (rt) {
duke@435 1109 case REF_OTHER:
duke@435 1110 // Unknown reference type, no special treatment
duke@435 1111 break;
duke@435 1112 case REF_SOFT:
duke@435 1113 list = &_discoveredSoftRefs[id];
duke@435 1114 break;
duke@435 1115 case REF_WEAK:
duke@435 1116 list = &_discoveredWeakRefs[id];
duke@435 1117 break;
duke@435 1118 case REF_FINAL:
duke@435 1119 list = &_discoveredFinalRefs[id];
duke@435 1120 break;
duke@435 1121 case REF_PHANTOM:
duke@435 1122 list = &_discoveredPhantomRefs[id];
duke@435 1123 break;
duke@435 1124 case REF_NONE:
duke@435 1125 // we should not reach here if we are an instanceRefKlass
duke@435 1126 default:
duke@435 1127 ShouldNotReachHere();
duke@435 1128 }
jmasa@2188 1129 if (TraceReferenceGC && PrintGCDetails) {
johnc@2316 1130 gclog_or_tty->print_cr("Thread %d gets list " INTPTR_FORMAT, id, list);
jmasa@2188 1131 }
duke@435 1132 return list;
duke@435 1133 }
duke@435 1134
coleenp@548 1135 inline void
coleenp@548 1136 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
coleenp@548 1137 oop obj,
coleenp@548 1138 HeapWord* discovered_addr) {
duke@435 1139 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
duke@435 1140 // First we must make sure this object is only enqueued once. CAS in a non null
duke@435 1141 // discovered_addr.
ysr@777 1142 oop current_head = refs_list.head();
stefank@3115 1143 // The last ref must have its discovered field pointing to itself.
stefank@3115 1144 oop next_discovered = (current_head != NULL) ? current_head : obj;
ysr@777 1145
ysr@1280 1146 // Note: In the case of G1, this specific pre-barrier is strictly
ysr@777 1147 // not necessary because the only case we are interested in
ysr@1280 1148 // here is when *discovered_addr is NULL (see the CAS further below),
ysr@1280 1149 // so this will expand to nothing. As a result, we have manually
ysr@1280 1150 // elided this out for G1, but left in the test for some future
ysr@3117 1151 // collector that might have need for a pre-barrier here, e.g.:
ysr@3117 1152 // _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
ysr@3117 1153 assert(!_discovered_list_needs_barrier || UseG1GC,
ysr@3117 1154 "Need to check non-G1 collector: "
ysr@3117 1155 "may need a pre-write-barrier for CAS from NULL below");
stefank@3115 1156 oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
coleenp@548 1157 NULL);
duke@435 1158 if (retest == NULL) {
duke@435 1159 // This thread just won the right to enqueue the object.
ysr@3117 1160 // We have separate lists for enqueueing, so no synchronization
duke@435 1161 // is necessary.
coleenp@548 1162 refs_list.set_head(obj);
ysr@887 1163 refs_list.inc_length(1);
ysr@777 1164 if (_discovered_list_needs_barrier) {
stefank@3115 1165 _bs->write_ref_field((void*)discovered_addr, next_discovered);
ysr@777 1166 }
johnc@2316 1167
johnc@2316 1168 if (TraceReferenceGC) {
ysr@3117 1169 gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
johnc@2316 1170 obj, obj->blueprint()->internal_name());
johnc@2316 1171 }
duke@435 1172 } else {
duke@435 1173 // If retest was non NULL, another thread beat us to it:
duke@435 1174 // The reference has already been discovered...
duke@435 1175 if (TraceReferenceGC) {
ysr@3117 1176 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
duke@435 1177 obj, obj->blueprint()->internal_name());
duke@435 1178 }
duke@435 1179 }
duke@435 1180 }
duke@435 1181
ysr@2337 1182 #ifndef PRODUCT
ysr@2337 1183 // Non-atomic (i.e. concurrent) discovery might allow us
ysr@2337 1184 // to observe j.l.References with NULL referents, being those
ysr@2337 1185 // cleared concurrently by mutators during (or after) discovery.
ysr@2337 1186 void ReferenceProcessor::verify_referent(oop obj) {
ysr@2337 1187 bool da = discovery_is_atomic();
ysr@2337 1188 oop referent = java_lang_ref_Reference::referent(obj);
ysr@2337 1189 assert(da ? referent->is_oop() : referent->is_oop_or_null(),
ysr@2337 1190 err_msg("Bad referent " INTPTR_FORMAT " found in Reference "
ysr@2337 1191 INTPTR_FORMAT " during %satomic discovery ",
ysr@2337 1192 (intptr_t)referent, (intptr_t)obj, da ? "" : "non-"));
ysr@2337 1193 }
ysr@2337 1194 #endif
ysr@2337 1195
duke@435 1196 // We mention two of several possible choices here:
duke@435 1197 // #0: if the reference object is not in the "originating generation"
duke@435 1198 // (or part of the heap being collected, indicated by our "span"
duke@435 1199 // we don't treat it specially (i.e. we scan it as we would
duke@435 1200 // a normal oop, treating its references as strong references).
ysr@3117 1201 // This means that references can't be discovered unless their
duke@435 1202 // referent is also in the same span. This is the simplest,
duke@435 1203 // most "local" and most conservative approach, albeit one
duke@435 1204 // that may cause weak references to be enqueued least promptly.
duke@435 1205 // We call this choice the "ReferenceBasedDiscovery" policy.
duke@435 1206 // #1: the reference object may be in any generation (span), but if
duke@435 1207 // the referent is in the generation (span) being currently collected
duke@435 1208 // then we can discover the reference object, provided
duke@435 1209 // the object has not already been discovered by
duke@435 1210 // a different concurrently running collector (as may be the
duke@435 1211 // case, for instance, if the reference object is in CMS and
duke@435 1212 // the referent in DefNewGeneration), and provided the processing
duke@435 1213 // of this reference object by the current collector will
duke@435 1214 // appear atomic to every other collector in the system.
duke@435 1215 // (Thus, for instance, a concurrent collector may not
duke@435 1216 // discover references in other generations even if the
duke@435 1217 // referent is in its own generation). This policy may,
duke@435 1218 // in certain cases, enqueue references somewhat sooner than
duke@435 1219 // might Policy #0 above, but at marginally increased cost
duke@435 1220 // and complexity in processing these references.
duke@435 1221 // We call this choice the "ReferentBasedDiscovery" policy.
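//
// Condensed sketch of the two policies (the precise logic, including the
// already-discovered and soft-ref filtering, follows in discover_reference
// below):
//
//   Policy #0 (ReferenceBasedDiscovery):
//     discover iff _span.contains(obj)
//   Policy #1 (ReferentBasedDiscovery):
//     discover iff _span.contains(obj) ||
//                  (discovery_is_atomic() && _span.contains(referent))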
duke@435 1222 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
ysr@3117 1223 // Make sure we are discovering refs (rather than processing discovered refs).
duke@435 1224 if (!_discovering_refs || !RegisterReferences) {
duke@435 1225 return false;
duke@435 1226 }
ysr@3117 1227 // We only discover active references.
coleenp@548 1228 oop next = java_lang_ref_Reference::next(obj);
ysr@3117 1229 if (next != NULL) { // Ref is no longer active
duke@435 1230 return false;
duke@435 1231 }
duke@435 1232
duke@435 1233 HeapWord* obj_addr = (HeapWord*)obj;
duke@435 1234 if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
duke@435 1235 !_span.contains(obj_addr)) {
duke@435 1236 // Reference is not in the originating generation;
duke@435 1237 // don't treat it specially (i.e. we want to scan it as a normal
duke@435 1238 // object with strong references).
duke@435 1239 return false;
duke@435 1240 }
duke@435 1241
ysr@3117 1242 // We only discover references whose referents are not (yet)
ysr@3117 1243 // known to be strongly reachable.
duke@435 1244 if (is_alive_non_header() != NULL) {
ysr@2337 1245 verify_referent(obj);
ysr@2337 1246 if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
duke@435 1247 return false; // referent is reachable
duke@435 1248 }
duke@435 1249 }
ysr@888 1250 if (rt == REF_SOFT) {
ysr@888 1251 // For soft refs we can decide now if these are not
ysr@888 1252 // current candidates for clearing, in which case we
ysr@888 1253 // can mark through them now, rather than delaying that
ysr@888 1254 // to the reference-processing phase. Since all current
ysr@888 1255 // time-stamp policies advance the soft-ref clock only
ysr@888 1256 // at a major collection cycle, this is always currently
ysr@888 1257 // accurate.
ysr@888 1258 if (!_current_soft_ref_policy->should_clear_reference(obj)) {
ysr@888 1259 return false;
ysr@888 1260 }
ysr@888 1261 }
duke@435 1262
ysr@777 1263 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
ysr@777 1264 const oop discovered = java_lang_ref_Reference::discovered(obj);
coleenp@548 1265 assert(discovered->is_oop_or_null(), "bad discovered field");
coleenp@548 1266 if (discovered != NULL) {
duke@435 1267 // The reference has already been discovered...
duke@435 1268 if (TraceReferenceGC) {
ysr@3117 1269 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
coleenp@548 1270 obj, obj->blueprint()->internal_name());
duke@435 1271 }
duke@435 1272 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
duke@435 1273 // assumes that an object is not processed twice;
duke@435 1274 // if it has already been discovered it must be on another
duke@435 1275 // generation's discovered list; so we won't discover it.
duke@435 1276 return false;
duke@435 1277 } else {
duke@435 1278 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
duke@435 1279 "Unrecognized policy");
duke@435 1280 // Check the assumption that an object is not discovered
duke@435 1281 // twice, except by concurrent collectors that may trace
duke@435 1282 // the same Reference object twice.
johnc@2316 1283 assert(UseConcMarkSweepGC || UseG1GC,
johnc@2316 1284 "Only possible with a concurrent marking collector");
duke@435 1285 return true;
duke@435 1286 }
duke@435 1287 }
duke@435 1288
duke@435 1289 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
ysr@2337 1290 verify_referent(obj);
ysr@3117 1291 // Discover if and only if EITHER:
ysr@3117 1292 // .. reference is in our span, OR
ysr@3117 1293 // .. we are an atomic collector and referent is in our span
duke@435 1294 if (_span.contains(obj_addr) ||
ysr@2337 1295 (discovery_is_atomic() &&
ysr@2337 1296 _span.contains(java_lang_ref_Reference::referent(obj)))) {
duke@435 1297 // should_enqueue = true;
duke@435 1298 } else {
duke@435 1299 return false;
duke@435 1300 }
duke@435 1301 } else {
duke@435 1302 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
duke@435 1303 _span.contains(obj_addr), "code inconsistency");
duke@435 1304 }
duke@435 1305
duke@435 1306 // Get the right type of discovered queue head.
duke@435 1307 DiscoveredList* list = get_discovered_list(rt);
duke@435 1308 if (list == NULL) {
duke@435 1309 return false; // nothing special needs to be done
duke@435 1310 }
duke@435 1311
duke@435 1312 if (_discovery_is_mt) {
duke@435 1313 add_to_discovered_list_mt(*list, obj, discovered_addr);
duke@435 1314 } else {
ysr@777 1315 // If "_discovered_list_needs_barrier", we do write barriers when
ysr@777 1316 // updating the discovered reference list. Otherwise, we do a raw store
ysr@777 1317 // here: the field will be visited later when processing the discovered
ysr@777 1318 // references.
ysr@777 1319 oop current_head = list->head();
stefank@3115 1320 // The last ref must have its discovered field pointing to itself.
stefank@3115 1321 oop next_discovered = (current_head != NULL) ? current_head : obj;
stefank@3115 1322
ysr@777 1323 // As in the case further above, since we are over-writing a NULL
ysr@777 1324 // pre-value, we can safely elide the pre-barrier here for the case of G1.
ysr@3117 1325 // e.g.: _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
ysr@777 1326 assert(discovered == NULL, "control point invariant");
ysr@3117 1327 assert(!_discovered_list_needs_barrier || UseG1GC,
ysr@3117 1328 "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
stefank@3115 1329 oop_store_raw(discovered_addr, next_discovered);
ysr@777 1330 if (_discovered_list_needs_barrier) {
stefank@3115 1331 _bs->write_ref_field((void*)discovered_addr, next_discovered);
ysr@777 1332 }
duke@435 1333 list->set_head(obj);
ysr@887 1334 list->inc_length(1);
duke@435 1335
johnc@2316 1336 if (TraceReferenceGC) {
ysr@3117 1337 gclog_or_tty->print_cr("Discovered reference (" INTPTR_FORMAT ": %s)",
johnc@2316 1338 obj, obj->blueprint()->internal_name());
duke@435 1339 }
duke@435 1340 }
ysr@3117 1341 assert(obj->is_oop(), "Discovered a bad reference");
ysr@2337 1342 verify_referent(obj);
duke@435 1343 return true;
duke@435 1344 }
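
// Hypothetical call-site sketch (the caller shown here is assumed for
// illustration; it is not defined in this file): a collector's scanning
// closure would typically offer each Reference to the processor before
// treating its referent as a strong root:
//
//   if (rp->discover_reference(obj, ref_type)) {
//     return;  // referent is handled later, during reference processing
//   }
//   // otherwise fall through and scan the referent as an ordinary field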
duke@435 1345
duke@435 1346 // Preclean the discovered references by removing those
duke@435 1347 // whose referents are alive, and by marking from those that
duke@435 1348 // are not active. These lists can be handled here
duke@435 1349 // in any order and, indeed, concurrently.
duke@435 1350 void ReferenceProcessor::preclean_discovered_references(
duke@435 1351 BoolObjectClosure* is_alive,
duke@435 1352 OopClosure* keep_alive,
duke@435 1353 VoidClosure* complete_gc,
jmasa@1625 1354 YieldClosure* yield,
jmasa@1625 1355 bool should_unload_classes) {
duke@435 1356
duke@435 1357 NOT_PRODUCT(verify_ok_to_handle_reflists());
duke@435 1358
jmasa@1370 1359 #ifdef ASSERT
jmasa@1370 1360 bool must_remember_klasses = (ClassUnloading && !UseConcMarkSweepGC) ||
jmasa@1625 1361 (CMSClassUnloadingEnabled && UseConcMarkSweepGC) ||
jmasa@1625 1362 (ExplicitGCInvokesConcurrentAndUnloadsClasses &&
jmasa@1625 1363 UseConcMarkSweepGC && should_unload_classes);
jmasa@1370 1364 RememberKlassesChecker mx(must_remember_klasses);
jmasa@1370 1365 #endif
duke@435 1366 // Soft references
duke@435 1367 {
duke@435 1368 TraceTime tt("Preclean SoftReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1369 false, gclog_or_tty);
jmasa@2188 1370 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1371 if (yield->should_return()) {
ysr@887 1372 return;
ysr@887 1373 }
duke@435 1374 preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
duke@435 1375 keep_alive, complete_gc, yield);
duke@435 1376 }
duke@435 1377 }
duke@435 1378
duke@435 1379 // Weak references
duke@435 1380 {
duke@435 1381 TraceTime tt("Preclean WeakReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1382 false, gclog_or_tty);
ysr@2651 1383 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1384 if (yield->should_return()) {
ysr@887 1385 return;
ysr@887 1386 }
duke@435 1387 preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
duke@435 1388 keep_alive, complete_gc, yield);
duke@435 1389 }
duke@435 1390 }
duke@435 1391
duke@435 1392 // Final references
duke@435 1393 {
duke@435 1394 TraceTime tt("Preclean FinalReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1395 false, gclog_or_tty);
ysr@2651 1396 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1397 if (yield->should_return()) {
ysr@887 1398 return;
ysr@887 1399 }
duke@435 1400 preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
duke@435 1401 keep_alive, complete_gc, yield);
duke@435 1402 }
duke@435 1403 }
duke@435 1404
duke@435 1405 // Phantom references
duke@435 1406 {
duke@435 1407 TraceTime tt("Preclean PhantomReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1408 false, gclog_or_tty);
ysr@2651 1409 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1410 if (yield->should_return()) {
ysr@887 1411 return;
ysr@887 1412 }
duke@435 1413 preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
duke@435 1414 keep_alive, complete_gc, yield);
duke@435 1415 }
duke@435 1416 }
duke@435 1417 }
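
// Hypothetical caller sketch (the closure below is assumed purely for
// illustration): a concurrent collector would typically drive precleaning
// with a yield closure that polls for urgent work, e.g. a pending safepoint:
//
//   class SampleYieldClosure : public YieldClosure {
//    public:
//     virtual bool should_return() {
//       return SafepointSynchronize::is_synchronizing();
//     }
//   };
//   ...
//   rp->preclean_discovered_references(&is_alive, &keep_alive, &complete_gc,
//                                      &sample_yield, unload_classes);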
duke@435 1418
duke@435 1419 // Walk the given discovered ref list, and remove all reference objects
duke@435 1420 // whose referents are still alive, whose referents are NULL, or which
ysr@887 1421 // are not active (have a non-NULL next field). NOTE: When we are
ysr@887 1422 // thus precleaning the ref lists (which happens single-threaded today),
ysr@887 1423 // we do not disable refs discovery to honour the correct semantics of
ysr@887 1424 // java.lang.Reference. As a result, we need to be careful below
ysr@887 1425 // that ref removal steps interleave safely with ref discovery steps
ysr@887 1426 // (in this thread).
coleenp@548 1427 void
coleenp@548 1428 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
coleenp@548 1429 BoolObjectClosure* is_alive,
coleenp@548 1430 OopClosure* keep_alive,
coleenp@548 1431 VoidClosure* complete_gc,
coleenp@548 1432 YieldClosure* yield) {
duke@435 1433 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 1434 while (iter.has_next()) {
duke@435 1435 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 1436 oop obj = iter.obj();
coleenp@548 1437 oop next = java_lang_ref_Reference::next(obj);
duke@435 1438 if (iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@548 1439 next != NULL) {
duke@435 1440 // The referent has been cleared, or is alive, or the Reference is not
duke@435 1441 // active; we need to trace and mark its cohort.
duke@435 1442 if (TraceReferenceGC) {
duke@435 1443 gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
duke@435 1444 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 1445 }
duke@435 1446 // Remove Reference object from list
duke@435 1447 iter.remove();
duke@435 1448 // Keep alive its cohort.
duke@435 1449 iter.make_referent_alive();
coleenp@548 1450 if (UseCompressedOops) {
coleenp@548 1451 narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 1452 keep_alive->do_oop(next_addr);
coleenp@548 1453 } else {
coleenp@548 1454 oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 1455 keep_alive->do_oop(next_addr);
coleenp@548 1456 }
ysr@887 1457 iter.move_to_next();
duke@435 1458 } else {
duke@435 1459 iter.next();
duke@435 1460 }
duke@435 1461 }
duke@435 1462 // Close the reachable set
duke@435 1463 complete_gc->do_void();
duke@435 1464
duke@435 1465 NOT_PRODUCT(
ysr@2651 1466 if (PrintGCDetails && PrintReferenceGC && (iter.processed() > 0)) {
jmasa@2188 1467 gclog_or_tty->print_cr(" Dropped %d Refs out of %d "
jmasa@2188 1468 "Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 1469 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 1470 }
duke@435 1471 )
duke@435 1472 }
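
// The removal loop above follows the common skeleton for walking a
// discovered list (a summary of the code above, not a separate API):
//
//   while (iter.has_next()) {
//     iter.load_ptrs(...);         // snapshot obj, discovered and referent
//     if (/* ref should be dropped */) {
//       iter.remove();             // unlink from the discovered list
//       ...                        // keep its cohort alive
//       iter.move_to_next();       // advance past the removed element
//     } else {
//       iter.next();               // keep the element and advance
//     }
//   }
//   complete_gc->do_void();        // close the reachable set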
duke@435 1473
duke@435 1474 const char* ReferenceProcessor::list_name(int i) {
jmasa@2188 1475 assert(i >= 0 && i < _max_num_q * subclasses_of_ref, "Out of bounds index");
jmasa@2188 1476 int j = i / _max_num_q;
duke@435 1477 switch (j) {
duke@435 1478 case 0: return "SoftRef";
duke@435 1479 case 1: return "WeakRef";
duke@435 1480 case 2: return "FinalRef";
duke@435 1481 case 3: return "PhantomRef";
duke@435 1482 }
duke@435 1483 ShouldNotReachHere();
duke@435 1484 return NULL;
duke@435 1485 }
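
// Worked example: with _max_num_q == 8 (a value assumed here purely for
// illustration), list index i == 19 gives j == 19 / 8 == 2, i.e. one of
// the "FinalRef" lists; the lists are laid out _max_num_q per subclass.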
duke@435 1486
duke@435 1487 #ifndef PRODUCT
duke@435 1488 void ReferenceProcessor::verify_ok_to_handle_reflists() {
duke@435 1489 // empty for now
duke@435 1490 }
duke@435 1491 #endif
duke@435 1492
duke@435 1493 #ifndef PRODUCT
duke@435 1494 void ReferenceProcessor::clear_discovered_references() {
duke@435 1495 guarantee(!_discovering_refs, "Discovering refs?");
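  // _discoveredSoftRefs is the base of one contiguous array that holds the
  // lists for all subclasses_of_ref subclasses, _max_num_q lists each, so
  // the loop below visits every discovered list, not just the soft ones.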
jmasa@2188 1496 for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
stefank@3115 1497 clear_discovered_references(_discoveredSoftRefs[i]);
duke@435 1498 }
duke@435 1499 }
stefank@3115 1500
duke@435 1501 #endif // PRODUCT