src/share/vm/memory/referenceProcessor.cpp

author       johnc
date         Thu, 22 Sep 2011 10:57:37 -0700
changeset    3175:4dfb2df418f2
parent       3117:eca1193ca245
child        3188:d1bdeef3e3e2
permissions  -rw-r--r--

6484982: G1: process references during evacuation pauses
Summary: G1 now uses two reference processors - one is used by concurrent marking and the other is used by STW GCs (both full and incremental evacuation pauses). In an evacuation pause, the reference processor is embedded into the closures used to scan objects. Doing so causes reference objects to be 'discovered' by the reference processor. At the end of the evacuation pause, these discovered reference objects are processed - preserving (and copying) referent objects (and their reachable graphs) as appropriate.
Reviewed-by: ysr, jwilhelm, brutisso, stefank, tonyp
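
The sketch below illustrates the scheme described in the summary (editor's illustration only: the closure name, and taking the ReferenceType as a literal rather than from the object's klass, are hypothetical simplifications, not the actual G1 types). During an evacuation pause a scan closure carries the STW reference processor and routes j.l.ref.Reference instances through discover_reference() instead of treating their referents as strong roots:

    // Hypothetical sketch: a scan closure embedding the STW reference
    // processor during an evacuation pause.
    class EvacScanClosureSketch : public OopClosure {
      ReferenceProcessor* _rp;  // the STW processor, not the concurrent-marking one
     public:
      EvacScanClosureSketch(ReferenceProcessor* rp) : _rp(rp) { }
      virtual void do_oop(oop* p) {
        oop obj = *p;
        // Try discovery for Reference subclasses; the real code obtains the
        // ReferenceType from the object's klass rather than assuming REF_WEAK.
        if (obj != NULL && obj->is_instanceRef() &&
            _rp->discover_reference(obj, REF_WEAK)) {
          return;  // discovered: the referent is dealt with at the end of the pause
        }
        // ... otherwise evacuate/copy obj as an ordinary strong reference ...
      }
      virtual void do_oop(narrowOop* p) { /* analogous compressed-oop path */ }
    };

At the end of the pause the discovered lists are processed (see process_discovered_references() below), preserving and copying surviving referents as appropriate.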

duke@435 1 /*
ysr@2651 2 * Copyright (c) 2001, 2011, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
stefank@2314 26 #include "classfile/javaClasses.hpp"
stefank@2314 27 #include "classfile/systemDictionary.hpp"
stefank@2314 28 #include "gc_interface/collectedHeap.hpp"
stefank@2314 29 #include "gc_interface/collectedHeap.inline.hpp"
stefank@2314 30 #include "memory/referencePolicy.hpp"
stefank@2314 31 #include "memory/referenceProcessor.hpp"
stefank@2314 32 #include "oops/oop.inline.hpp"
stefank@2314 33 #include "runtime/java.hpp"
stefank@2314 34 #include "runtime/jniHandles.hpp"
duke@435 35
ysr@888 36 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
ysr@888 37 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy = NULL;
ysr@3117 38 bool ReferenceProcessor::_pending_list_uses_discovered_field = false;
ysr@888 39
duke@435 40 void referenceProcessor_init() {
duke@435 41 ReferenceProcessor::init_statics();
duke@435 42 }
duke@435 43
duke@435 44 void ReferenceProcessor::init_statics() {
duke@435 45 // Initialize the master soft ref clock.
duke@435 46 java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());
duke@435 47
ysr@888 48 _always_clear_soft_ref_policy = new AlwaysClearPolicy();
ysr@888 49 _default_soft_ref_policy = new COMPILER2_PRESENT(LRUMaxHeapPolicy())
ysr@888 50 NOT_COMPILER2(LRUCurrentHeapPolicy());
ysr@888 51 if (_always_clear_soft_ref_policy == NULL || _default_soft_ref_policy == NULL) {
ysr@888 52 vm_exit_during_initialization("Could not allocate reference policy object");
ysr@888 53 }
duke@435 54 guarantee(RefDiscoveryPolicy == ReferenceBasedDiscovery ||
duke@435 55 RefDiscoveryPolicy == ReferentBasedDiscovery,
duke@435 56 "Unrecongnized RefDiscoveryPolicy");
ysr@3117 57 _pending_list_uses_discovered_field = JDK_Version::current().pending_list_uses_discovered_field();
duke@435 58 }
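// Editor's note: COMPILER2_PRESENT(x) expands to x only in server (C2)
// builds and NOT_COMPILER2(x) only in builds without C2, so the policy
// selection above reads, on a server VM:
//   _default_soft_ref_policy = new LRUMaxHeapPolicy();
// and on a client VM:
//   _default_soft_ref_policy = new LRUCurrentHeapPolicy();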
duke@435 59
duke@435 60 ReferenceProcessor::ReferenceProcessor(MemRegion span,
ysr@2651 61 bool mt_processing,
ysr@2651 62 int mt_processing_degree,
ysr@2651 63 bool mt_discovery,
ysr@2651 64 int mt_discovery_degree,
coleenp@548 65 bool atomic_discovery,
ysr@2651 66 BoolObjectClosure* is_alive_non_header,
ysr@777 67 bool discovered_list_needs_barrier) :
duke@435 68 _discovering_refs(false),
duke@435 69 _enqueuing_is_done(false),
ysr@2651 70 _is_alive_non_header(is_alive_non_header),
ysr@777 71 _discovered_list_needs_barrier(discovered_list_needs_barrier),
ysr@777 72 _bs(NULL),
duke@435 73 _processing_is_mt(mt_processing),
duke@435 74 _next_id(0)
duke@435 75 {
duke@435 76 _span = span;
duke@435 77 _discovery_is_atomic = atomic_discovery;
duke@435 78 _discovery_is_mt = mt_discovery;
ysr@2651 79 _num_q = MAX2(1, mt_processing_degree);
ysr@2651 80 _max_num_q = MAX2(_num_q, mt_discovery_degree);
johnc@3175 81 _discoveredSoftRefs = NEW_C_HEAP_ARRAY(DiscoveredList,
johnc@3175 82 _max_num_q * number_of_subclasses_of_ref());
duke@435 83 if (_discoveredSoftRefs == NULL) {
duke@435 84 vm_exit_during_initialization("Could not allocate RefProc Array");
duke@435 85 }
jmasa@2188 86 _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q];
jmasa@2188 87 _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_q];
jmasa@2188 88 _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
stefank@3115 89 // Initialize all entries to NULL
johnc@3175 90 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
stefank@3115 91 _discoveredSoftRefs[i].set_head(NULL);
duke@435 92 _discoveredSoftRefs[i].set_length(0);
duke@435 93 }
ysr@3117 94 // If we do barriers, cache a copy of the barrier set.
ysr@777 95 if (discovered_list_needs_barrier) {
ysr@777 96 _bs = Universe::heap()->barrier_set();
ysr@777 97 }
ysr@2651 98 setup_policy(false /* default soft ref policy */);
duke@435 99 }
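// Editor's illustration of the flat list layout set up above: with
// _max_num_q == 4, the single NEW_C_HEAP_ARRAY allocation holds
// number_of_subclasses_of_ref() * 4 == 16 DiscoveredLists, carved up as
//   _discoveredSoftRefs    -> flat entries  0..3
//   _discoveredWeakRefs    -> flat entries  4..7
//   _discoveredFinalRefs   -> flat entries  8..11
//   _discoveredPhantomRefs -> flat entries 12..15
// so flat index i corresponds to subclass (i / _max_num_q) and queue
// (i % _max_num_q); the loops below iterate the whole flat array.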
duke@435 100
duke@435 101 #ifndef PRODUCT
duke@435 102 void ReferenceProcessor::verify_no_references_recorded() {
duke@435 103 guarantee(!_discovering_refs, "Discovering refs?");
johnc@3175 104 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
johnc@3175 105 guarantee(_discoveredSoftRefs[i].is_empty(),
duke@435 106 "Found non-empty discovered list");
duke@435 107 }
duke@435 108 }
duke@435 109 #endif
duke@435 110
duke@435 111 void ReferenceProcessor::weak_oops_do(OopClosure* f) {
johnc@3175 112 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
coleenp@548 113 if (UseCompressedOops) {
coleenp@548 114 f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
coleenp@548 115 } else {
coleenp@548 116 f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
coleenp@548 117 }
duke@435 118 }
duke@435 119 }
duke@435 120
coleenp@548 121 void ReferenceProcessor::update_soft_ref_master_clock() {
duke@435 122 // Update (advance) the soft ref master clock field. This must be done
duke@435 123 // after processing the soft ref list.
duke@435 124 jlong now = os::javaTimeMillis();
duke@435 125 jlong clock = java_lang_ref_SoftReference::clock();
duke@435 126 NOT_PRODUCT(
duke@435 127 if (now < clock) {
duke@435 128 warning("time warp: %d to %d", clock, now);
duke@435 129 }
duke@435 130 )
duke@435 131 // In product mode, protect ourselves from system time being adjusted
duke@435 132 // externally and going backward; see note in the implementation of
duke@435 133 // GenCollectedHeap::time_since_last_gc() for the right way to fix
duke@435 134 // this uniformly throughout the VM; see bug-id 4741166. XXX
duke@435 135 if (now > clock) {
duke@435 136 java_lang_ref_SoftReference::set_clock(now);
duke@435 137 }
duke@435 138 // Else leave clock stalled at its old value until time progresses
duke@435 139 // past clock value.
duke@435 140 }
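// Editor's sketch of how the master clock is consumed (simplified; see
// referencePolicy.cpp for the actual code): an LRU policy clears a
// SoftReference only if it has gone unreferenced for longer than an
// interval scaled by the amount of free heap, roughly:
//   jlong elapsed = java_lang_ref_SoftReference::clock()
//                   - java_lang_ref_SoftReference::timestamp(ref);
//   return elapsed > max_interval;  // interval derived from free heap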
duke@435 141
coleenp@548 142 void ReferenceProcessor::process_discovered_references(
duke@435 143 BoolObjectClosure* is_alive,
duke@435 144 OopClosure* keep_alive,
duke@435 145 VoidClosure* complete_gc,
duke@435 146 AbstractRefProcTaskExecutor* task_executor) {
duke@435 147 NOT_PRODUCT(verify_ok_to_handle_reflists());
duke@435 148
duke@435 149 assert(!enqueuing_is_done(), "If here enqueuing should not be complete");
duke@435 150 // Stop treating discovered references specially.
duke@435 151 disable_discovery();
duke@435 152
duke@435 153 bool trace_time = PrintGCDetails && PrintReferenceGC;
duke@435 154 // Soft references
duke@435 155 {
duke@435 156 TraceTime tt("SoftReference", trace_time, false, gclog_or_tty);
ysr@888 157 process_discovered_reflist(_discoveredSoftRefs, _current_soft_ref_policy, true,
duke@435 158 is_alive, keep_alive, complete_gc, task_executor);
duke@435 159 }
duke@435 160
duke@435 161 update_soft_ref_master_clock();
duke@435 162
duke@435 163 // Weak references
duke@435 164 {
duke@435 165 TraceTime tt("WeakReference", trace_time, false, gclog_or_tty);
duke@435 166 process_discovered_reflist(_discoveredWeakRefs, NULL, true,
duke@435 167 is_alive, keep_alive, complete_gc, task_executor);
duke@435 168 }
duke@435 169
duke@435 170 // Final references
duke@435 171 {
duke@435 172 TraceTime tt("FinalReference", trace_time, false, gclog_or_tty);
duke@435 173 process_discovered_reflist(_discoveredFinalRefs, NULL, false,
duke@435 174 is_alive, keep_alive, complete_gc, task_executor);
duke@435 175 }
duke@435 176
duke@435 177 // Phantom references
duke@435 178 {
duke@435 179 TraceTime tt("PhantomReference", trace_time, false, gclog_or_tty);
duke@435 180 process_discovered_reflist(_discoveredPhantomRefs, NULL, false,
duke@435 181 is_alive, keep_alive, complete_gc, task_executor);
duke@435 182 }
duke@435 183
duke@435 184 // Weak global JNI references. It would make more sense (semantically) to
duke@435 185 // traverse these simultaneously with the regular weak references above, but
duke@435 186 // that is not how the JDK 1.2 specification is written. See #4126360. Native code can
duke@435 187 // thus use JNI weak references to circumvent the phantom references and
duke@435 188 // resurrect a "post-mortem" object.
duke@435 189 {
duke@435 190 TraceTime tt("JNI Weak Reference", trace_time, false, gclog_or_tty);
duke@435 191 if (task_executor != NULL) {
duke@435 192 task_executor->set_single_threaded_mode();
duke@435 193 }
duke@435 194 process_phaseJNI(is_alive, keep_alive, complete_gc);
duke@435 195 }
duke@435 196 }
duke@435 197
duke@435 198 #ifndef PRODUCT
duke@435 199 // Calculate the number of jni handles.
coleenp@548 200 uint ReferenceProcessor::count_jni_refs() {
duke@435 201 class AlwaysAliveClosure: public BoolObjectClosure {
duke@435 202 public:
coleenp@548 203 virtual bool do_object_b(oop obj) { return true; }
coleenp@548 204 virtual void do_object(oop obj) { assert(false, "Don't call"); }
duke@435 205 };
duke@435 206
duke@435 207 class CountHandleClosure: public OopClosure {
duke@435 208 private:
duke@435 209 int _count;
duke@435 210 public:
duke@435 211 CountHandleClosure(): _count(0) {}
coleenp@548 212 void do_oop(oop* unused) { _count++; }
coleenp@548 213 void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
duke@435 214 int count() { return _count; }
duke@435 215 };
duke@435 216 CountHandleClosure global_handle_count;
duke@435 217 AlwaysAliveClosure always_alive;
duke@435 218 JNIHandles::weak_oops_do(&always_alive, &global_handle_count);
duke@435 219 return global_handle_count.count();
duke@435 220 }
duke@435 221 #endif
duke@435 222
duke@435 223 void ReferenceProcessor::process_phaseJNI(BoolObjectClosure* is_alive,
duke@435 224 OopClosure* keep_alive,
duke@435 225 VoidClosure* complete_gc) {
duke@435 226 #ifndef PRODUCT
duke@435 227 if (PrintGCDetails && PrintReferenceGC) {
duke@435 228 unsigned int count = count_jni_refs();
duke@435 229 gclog_or_tty->print(", %u refs", count);
duke@435 230 }
duke@435 231 #endif
duke@435 232 JNIHandles::weak_oops_do(is_alive, keep_alive);
duke@435 233 complete_gc->do_void();
duke@435 234 }
duke@435 235
coleenp@548 236
coleenp@548 237 template <class T>
phh@1558 238 bool enqueue_discovered_ref_helper(ReferenceProcessor* ref,
phh@1558 239 AbstractRefProcTaskExecutor* task_executor) {
coleenp@548 240
duke@435 241 // Remember old value of pending references list
coleenp@548 242 T* pending_list_addr = (T*)java_lang_ref_Reference::pending_list_addr();
coleenp@548 243 T old_pending_list_value = *pending_list_addr;
duke@435 244
duke@435 245 // Enqueue references that are not made active again, and
duke@435 246 // clear the decks for the next collection (cycle).
coleenp@548 247 ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor);
duke@435 248 // Do the oop-check on pending_list_addr missed in
duke@435 249 // enqueue_discovered_reflist. We should probably
duke@435 250 // do a raw oop_check so that future such idempotent
duke@435 251 // oop_stores relying on the oop-check side-effect
duke@435 252 // may be elided automatically and safely without
duke@435 253 // affecting correctness.
coleenp@548 254 oop_store(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
duke@435 255
duke@435 256 // Stop treating discovered references specially.
coleenp@548 257 ref->disable_discovery();
duke@435 258
duke@435 259 // Return true if new pending references were added
duke@435 260 return old_pending_list_value != *pending_list_addr;
duke@435 261 }
duke@435 262
coleenp@548 263 bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
coleenp@548 264 NOT_PRODUCT(verify_ok_to_handle_reflists());
coleenp@548 265 if (UseCompressedOops) {
coleenp@548 266 return enqueue_discovered_ref_helper<narrowOop>(this, task_executor);
coleenp@548 267 } else {
coleenp@548 268 return enqueue_discovered_ref_helper<oop>(this, task_executor);
coleenp@548 269 }
coleenp@548 270 }
coleenp@548 271
duke@435 272 void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
coleenp@548 273 HeapWord* pending_list_addr) {
duke@435 274 // Given a list of refs linked through the "discovered" field
ysr@3117 275 // (java.lang.ref.Reference.discovered), self-loop their "next" field
ysr@3117 276 // thus distinguishing them from active References, then
ysr@3117 277 // prepend them to the pending list.
ysr@3117 278 // BACKWARD COMPATIBILITY NOTE: For older JDKs (prior to the fix for 4956777),
ysr@3117 279 // the "next" field is used to chain the pending list, not the discovered
ysr@3117 280 // field.
ysr@3117 281
duke@435 282 if (TraceReferenceGC && PrintGCDetails) {
duke@435 283 gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
duke@435 284 INTPTR_FORMAT, (address)refs_list.head());
duke@435 285 }
stefank@3115 286
stefank@3115 287 oop obj = NULL;
ysr@3117 288 oop next_d = refs_list.head();
ysr@3117 289 if (pending_list_uses_discovered_field()) { // New behaviour
ysr@3117 290 // Walk down the list, self-looping the next field
ysr@3117 291 // so that the References are not considered active.
ysr@3117 292 while (obj != next_d) {
ysr@3117 293 obj = next_d;
ysr@3117 294 assert(obj->is_instanceRef(), "should be a reference object");
ysr@3117 295 next_d = java_lang_ref_Reference::discovered(obj);
ysr@3117 296 if (TraceReferenceGC && PrintGCDetails) {
ysr@3117 297 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
ysr@3117 298 obj, next_d);
ysr@3117 299 }
ysr@3117 300 assert(java_lang_ref_Reference::next(obj) == NULL,
ysr@3117 301 "Reference not active; should not be discovered");
ysr@3117 302 // Self-loop next, so as to make Ref not active.
ysr@3117 303 java_lang_ref_Reference::set_next(obj, obj);
ysr@3117 304 if (next_d == obj) { // obj is last
ysr@3117 305 // Swap refs_list into pending_list_addr and
ysr@3117 306 // set obj's discovered to what we read from pending_list_addr.
ysr@3117 307 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
ysr@3117 308 // Need oop_check on pending_list_addr above;
ysr@3117 309 // see special oop-check code at the end of
ysr@3117 310 // enqueue_discovered_reflists() further below.
ysr@3117 311 java_lang_ref_Reference::set_discovered(obj, old); // old may be NULL
ysr@3117 312 }
duke@435 313 }
ysr@3117 314 } else { // Old behaviour
ysr@3117 315 // Walk down the list, copying the discovered field into
ysr@3117 316 // the next field and clearing the discovered field.
ysr@3117 317 while (obj != next_d) {
ysr@3117 318 obj = next_d;
ysr@3117 319 assert(obj->is_instanceRef(), "should be a reference object");
ysr@3117 320 next_d = java_lang_ref_Reference::discovered(obj);
ysr@3117 321 if (TraceReferenceGC && PrintGCDetails) {
ysr@3117 322 gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
ysr@3117 323 obj, next_d);
ysr@3117 324 }
ysr@3117 325 assert(java_lang_ref_Reference::next(obj) == NULL,
ysr@3117 326 "The reference should not be enqueued");
ysr@3117 327 if (next_d == obj) { // obj is last
ysr@3117 328 // Swap refs_list into pending_list_addr and
ysr@3117 329 // set obj's next to what we read from pending_list_addr.
ysr@3117 330 oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
ysr@3117 331 // Need oop_check on pending_list_addr above;
ysr@3117 332 // see special oop-check code at the end of
ysr@3117 333 // enqueue_discovered_reflists() further below.
ysr@3117 334 if (old == NULL) {
ysr@3117 335 // obj should be made to point to itself, since
ysr@3117 336 // pending list was empty.
ysr@3117 337 java_lang_ref_Reference::set_next(obj, obj);
ysr@3117 338 } else {
ysr@3117 339 java_lang_ref_Reference::set_next(obj, old);
ysr@3117 340 }
duke@435 341 } else {
ysr@3117 342 java_lang_ref_Reference::set_next(obj, next_d);
duke@435 343 }
ysr@3117 344 java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
duke@435 345 }
duke@435 346 }
duke@435 347 }
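// Editor's illustration of the new (discovered-field) behaviour above,
// for a two-element list r1 -> r2 with P the old value at
// pending_list_addr:
//   before: r1.discovered == r2, r2.discovered == r2 (self-loop at tail),
//           r1.next == r2.next == NULL (both active)
//   after:  r1.next == r1, r2.next == r2 (both inactive),
//           r2.discovered == P, *pending_list_addr == r1
// i.e. the discovered chain is spliced, as-is, onto the front of the
// pending list, and each element is made inactive by self-looping "next".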
duke@435 348
duke@435 349 // Parallel enqueue task
duke@435 350 class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
duke@435 351 public:
duke@435 352 RefProcEnqueueTask(ReferenceProcessor& ref_processor,
duke@435 353 DiscoveredList discovered_refs[],
coleenp@548 354 HeapWord* pending_list_addr,
duke@435 355 int n_queues)
duke@435 356 : EnqueueTask(ref_processor, discovered_refs,
stefank@3115 357 pending_list_addr, n_queues)
duke@435 358 { }
duke@435 359
coleenp@548 360 virtual void work(unsigned int work_id) {
ysr@2651 361 assert(work_id < (unsigned int)_ref_processor.max_num_q(), "Index out-of-bounds");
duke@435 362 // Simplest first cut: static partitioning.
duke@435 363 int index = work_id;
jmasa@2188 364 // The increment on "index" must correspond to the maximum number of queues
jmasa@2188 365 // (n_queues) with which that ReferenceProcessor was created. That
jmasa@2188 366 // is because of the "clever" way the discovered references lists were
ysr@2651 367 // allocated and are indexed into.
ysr@2651 368 assert(_n_queues == (int) _ref_processor.max_num_q(), "Different number not expected");
jmasa@2188 369 for (int j = 0;
johnc@3175 370 j < ReferenceProcessor::number_of_subclasses_of_ref();
jmasa@2188 371 j++, index += _n_queues) {
duke@435 372 _ref_processor.enqueue_discovered_reflist(
duke@435 373 _refs_lists[index], _pending_list_addr);
stefank@3115 374 _refs_lists[index].set_head(NULL);
duke@435 375 _refs_lists[index].set_length(0);
duke@435 376 }
duke@435 377 }
duke@435 378 };
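// Editor's worked example of the strided indexing in work() above: with
// _n_queues == 4 and work_id == 1 the loop visits flat indices 1, 5, 9
// and 13, i.e. queue 1 of the soft, weak, final and phantom lists
// respectively (see the layout note after the constructor).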
duke@435 379
duke@435 380 // Enqueue references that are not made active again
coleenp@548 381 void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
duke@435 382 AbstractRefProcTaskExecutor* task_executor) {
duke@435 383 if (_processing_is_mt && task_executor != NULL) {
duke@435 384 // Parallel code
duke@435 385 RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
stefank@3115 386 pending_list_addr, _max_num_q);
duke@435 387 task_executor->execute(tsk);
duke@435 388 } else {
duke@435 389 // Serial code: call the parent class's implementation
johnc@3175 390 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
duke@435 391 enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
stefank@3115 392 _discoveredSoftRefs[i].set_head(NULL);
duke@435 393 _discoveredSoftRefs[i].set_length(0);
duke@435 394 }
duke@435 395 }
duke@435 396 }
duke@435 397
johnc@3175 398 void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
duke@435 399 _discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
coleenp@548 400 oop discovered = java_lang_ref_Reference::discovered(_ref);
coleenp@548 401 assert(_discovered_addr && discovered->is_oop_or_null(),
duke@435 402 "discovered field is bad");
coleenp@548 403 _next = discovered;
duke@435 404 _referent_addr = java_lang_ref_Reference::referent_addr(_ref);
coleenp@548 405 _referent = java_lang_ref_Reference::referent(_ref);
duke@435 406 assert(Universe::heap()->is_in_reserved_or_null(_referent),
duke@435 407 "Wrong oop found in java.lang.Reference object");
duke@435 408 assert(allow_null_referent ?
duke@435 409 _referent->is_oop_or_null()
duke@435 410 : _referent->is_oop(),
duke@435 411 "bad referent");
duke@435 412 }
duke@435 413
johnc@3175 414 void DiscoveredListIterator::remove() {
duke@435 415 assert(_ref->is_oop(), "Dropping a bad reference");
coleenp@548 416 oop_store_raw(_discovered_addr, NULL);
stefank@3115 417
coleenp@548 418 // First _prev_next ref actually points into DiscoveredList (gross).
stefank@3115 419 oop new_next;
stefank@3115 420 if (_next == _ref) {
stefank@3115 421 // At the end of the list, we should make _prev point to itself.
stefank@3115 422 // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
stefank@3115 423 // and _prev will be NULL.
stefank@3115 424 new_next = _prev;
stefank@3115 425 } else {
stefank@3115 426 new_next = _next;
stefank@3115 427 }
stefank@3115 428
coleenp@548 429 if (UseCompressedOops) {
coleenp@548 430 // Remove Reference object from list.
stefank@3115 431 oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
coleenp@548 432 } else {
coleenp@548 433 // Remove Reference object from list.
stefank@3115 434 oopDesc::store_heap_oop((oop*)_prev_next, new_next);
coleenp@548 435 }
duke@435 436 NOT_PRODUCT(_removed++);
ysr@887 437 _refs_list.dec_length(1);
duke@435 438 }
duke@435 439
johnc@3175 440 // Make the Reference object active again.
johnc@3175 441 void DiscoveredListIterator::make_active() {
johnc@3175 442 // For G1 we don't want to use set_next - it
johnc@3175 443 // will dirty the card for the next field of
johnc@3175 444 // the reference object and will fail
johnc@3175 445 // card table (CT) verification.
johnc@3175 446 if (UseG1GC) {
johnc@3175 447 BarrierSet* bs = oopDesc::bs();
johnc@3175 448 HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
johnc@3175 449
johnc@3175 450 if (UseCompressedOops) {
johnc@3175 451 bs->write_ref_field_pre((narrowOop*)next_addr, NULL);
johnc@3175 452 } else {
johnc@3175 453 bs->write_ref_field_pre((oop*)next_addr, NULL);
johnc@3175 454 }
johnc@3175 455 java_lang_ref_Reference::set_next_raw(_ref, NULL);
stefank@3115 456 } else {
johnc@3175 457 java_lang_ref_Reference::set_next(_ref, NULL);
stefank@3115 458 }
johnc@3175 459 }
johnc@3175 460
johnc@3175 461 void DiscoveredListIterator::clear_referent() {
johnc@3175 462 oop_store_raw(_referent_addr, NULL);
duke@435 463 }
duke@435 464
duke@435 465 // NOTE: process_phase*() are largely similar, and at a high level
duke@435 466 // merely iterate over the extant list applying a predicate to
duke@435 467 // each of its elements and possibly removing that element from the
duke@435 468 // list and applying some further closures to that element.
duke@435 469 // We should consider the possibility of replacing these
duke@435 470 // process_phase*() methods by abstracting them into
duke@435 471 // a single general iterator invocation that receives appropriate
duke@435 472 // closures that accomplish this work.
duke@435 473
duke@435 474 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
duke@435 475 // referents are not alive, but that should be kept alive for policy reasons.
duke@435 476 // Keep alive the transitive closure of all such referents.
duke@435 477 void
coleenp@548 478 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
duke@435 479 ReferencePolicy* policy,
duke@435 480 BoolObjectClosure* is_alive,
duke@435 481 OopClosure* keep_alive,
duke@435 482 VoidClosure* complete_gc) {
duke@435 483 assert(policy != NULL, "Must have a non-NULL policy");
coleenp@548 484 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 485 // Decide which softly reachable refs should be kept alive.
duke@435 486 while (iter.has_next()) {
duke@435 487 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
duke@435 488 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
duke@435 489 if (referent_is_dead && !policy->should_clear_reference(iter.obj())) {
duke@435 490 if (TraceReferenceGC) {
duke@435 491 gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
coleenp@548 492 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 493 }
ysr@887 494 // Remove Reference object from list
ysr@887 495 iter.remove();
duke@435 496 // Make the Reference object active again
duke@435 497 iter.make_active();
duke@435 498 // keep the referent around
duke@435 499 iter.make_referent_alive();
ysr@887 500 iter.move_to_next();
duke@435 501 } else {
duke@435 502 iter.next();
duke@435 503 }
duke@435 504 }
duke@435 505 // Close the reachable set
duke@435 506 complete_gc->do_void();
duke@435 507 NOT_PRODUCT(
duke@435 508 if (PrintGCDetails && TraceReferenceGC) {
jmasa@2188 509 gclog_or_tty->print_cr(" Dropped %d dead Refs out of %d "
ysr@3117 510 "discovered Refs by policy, from list " INTPTR_FORMAT,
jmasa@2188 511 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 512 }
duke@435 513 )
duke@435 514 }
duke@435 515
duke@435 516 // Traverse the list and remove any Refs that are not active, or
duke@435 517 // whose referents are either alive or NULL.
duke@435 518 void
coleenp@548 519 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
duke@435 520 BoolObjectClosure* is_alive,
coleenp@548 521 OopClosure* keep_alive) {
duke@435 522 assert(discovery_is_atomic(), "Error");
coleenp@548 523 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 524 while (iter.has_next()) {
duke@435 525 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
coleenp@548 526 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
coleenp@548 527 assert(next == NULL, "Should not discover inactive Reference");
duke@435 528 if (iter.is_referent_alive()) {
duke@435 529 if (TraceReferenceGC) {
duke@435 530 gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
coleenp@548 531 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 532 }
duke@435 533 // The referent is reachable after all.
ysr@887 534 // Remove Reference object from list.
ysr@887 535 iter.remove();
duke@435 536 // Update the referent pointer as necessary: Note that this
duke@435 537 // should not entail any recursive marking because the
duke@435 538 // referent must already have been traversed.
duke@435 539 iter.make_referent_alive();
ysr@887 540 iter.move_to_next();
duke@435 541 } else {
duke@435 542 iter.next();
duke@435 543 }
duke@435 544 }
duke@435 545 NOT_PRODUCT(
ysr@2651 546 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
jmasa@2188 547 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
jmasa@2188 548 "Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 549 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 550 }
duke@435 551 )
duke@435 552 }
duke@435 553
duke@435 554 void
coleenp@548 555 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
coleenp@548 556 BoolObjectClosure* is_alive,
coleenp@548 557 OopClosure* keep_alive,
coleenp@548 558 VoidClosure* complete_gc) {
duke@435 559 assert(!discovery_is_atomic(), "Error");
coleenp@548 560 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 561 while (iter.has_next()) {
duke@435 562 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 563 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
coleenp@548 564 oop next = java_lang_ref_Reference::next(iter.obj());
duke@435 565 if ((iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@548 566 next != NULL)) {
coleenp@548 567 assert(next->is_oop_or_null(), "bad next field");
duke@435 568 // Remove Reference object from list
duke@435 569 iter.remove();
duke@435 570 // Trace the cohorts
duke@435 571 iter.make_referent_alive();
coleenp@548 572 if (UseCompressedOops) {
coleenp@548 573 keep_alive->do_oop((narrowOop*)next_addr);
coleenp@548 574 } else {
coleenp@548 575 keep_alive->do_oop((oop*)next_addr);
coleenp@548 576 }
ysr@887 577 iter.move_to_next();
duke@435 578 } else {
duke@435 579 iter.next();
duke@435 580 }
duke@435 581 }
duke@435 582 // Now close the newly reachable set
duke@435 583 complete_gc->do_void();
duke@435 584 NOT_PRODUCT(
ysr@2651 585 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
jmasa@2188 586 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
jmasa@2188 587 "Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 588 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 589 }
duke@435 590 )
duke@435 591 }
duke@435 592
duke@435 593 // Traverse the list and process the referents, by either
coleenp@548 594 // clearing them or keeping them (and their reachable
duke@435 595 // closure) alive.
duke@435 596 void
coleenp@548 597 ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
duke@435 598 bool clear_referent,
duke@435 599 BoolObjectClosure* is_alive,
duke@435 600 OopClosure* keep_alive,
duke@435 601 VoidClosure* complete_gc) {
jmasa@2188 602 ResourceMark rm;
coleenp@548 603 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 604 while (iter.has_next()) {
duke@435 605 iter.update_discovered();
duke@435 606 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
duke@435 607 if (clear_referent) {
duke@435 608 // NULL out referent pointer
duke@435 609 iter.clear_referent();
duke@435 610 } else {
duke@435 611 // keep the referent around
duke@435 612 iter.make_referent_alive();
duke@435 613 }
duke@435 614 if (TraceReferenceGC) {
duke@435 615 gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
duke@435 616 clear_referent ? "cleared " : "",
coleenp@548 617 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 618 }
duke@435 619 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
duke@435 620 iter.next();
duke@435 621 }
stefank@3115 622 // Remember to update the next pointer of the last ref.
duke@435 623 iter.update_discovered();
duke@435 624 // Close the reachable set
duke@435 625 complete_gc->do_void();
duke@435 626 }
duke@435 627
duke@435 628 void
stefank@3115 629 ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
stefank@3115 630 oop obj = NULL;
stefank@3115 631 oop next = refs_list.head();
stefank@3115 632 while (next != obj) {
stefank@3115 633 obj = next;
stefank@3115 634 next = java_lang_ref_Reference::discovered(obj);
stefank@3115 635 java_lang_ref_Reference::set_discovered_raw(obj, NULL);
stefank@3115 636 }
stefank@3115 637 refs_list.set_head(NULL);
stefank@3115 638 refs_list.set_length(0);
stefank@3115 639 }
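// Editor's note: the walk above terminates because the last Reference on
// a DiscoveredList has its discovered field pointing to itself (see
// add_to_discovered_list_mt() and discover_reference() below), so
// "next == obj" identifies the tail even though the chain is not
// NULL-terminated.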
stefank@3115 640
stefank@3115 641 void
coleenp@548 642 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
stefank@3115 643 clear_discovered_references(refs_list);
duke@435 644 }
duke@435 645
ysr@777 646 void ReferenceProcessor::abandon_partial_discovery() {
ysr@777 647 // loop over the lists
johnc@3175 648 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
jmasa@2188 649 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
johnc@3175 650 gclog_or_tty->print_cr("\nAbandoning %s discovered list", list_name(i));
ysr@777 651 }
ysr@777 652 abandon_partial_discovered_list(_discoveredSoftRefs[i]);
duke@435 653 }
duke@435 654 }
duke@435 655
duke@435 656 class RefProcPhase1Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 657 public:
duke@435 658 RefProcPhase1Task(ReferenceProcessor& ref_processor,
duke@435 659 DiscoveredList refs_lists[],
duke@435 660 ReferencePolicy* policy,
duke@435 661 bool marks_oops_alive)
duke@435 662 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
duke@435 663 _policy(policy)
duke@435 664 { }
duke@435 665 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 666 OopClosure& keep_alive,
duke@435 667 VoidClosure& complete_gc)
duke@435 668 {
jmasa@2188 669 Thread* thr = Thread::current();
jmasa@2188 670 int refs_list_index = ((WorkerThread*)thr)->id();
jmasa@2188 671 _ref_processor.process_phase1(_refs_lists[refs_list_index], _policy,
duke@435 672 &is_alive, &keep_alive, &complete_gc);
duke@435 673 }
duke@435 674 private:
duke@435 675 ReferencePolicy* _policy;
duke@435 676 };
duke@435 677
duke@435 678 class RefProcPhase2Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 679 public:
duke@435 680 RefProcPhase2Task(ReferenceProcessor& ref_processor,
duke@435 681 DiscoveredList refs_lists[],
duke@435 682 bool marks_oops_alive)
duke@435 683 : ProcessTask(ref_processor, refs_lists, marks_oops_alive)
duke@435 684 { }
duke@435 685 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 686 OopClosure& keep_alive,
duke@435 687 VoidClosure& complete_gc)
duke@435 688 {
duke@435 689 _ref_processor.process_phase2(_refs_lists[i],
duke@435 690 &is_alive, &keep_alive, &complete_gc);
duke@435 691 }
duke@435 692 };
duke@435 693
duke@435 694 class RefProcPhase3Task: public AbstractRefProcTaskExecutor::ProcessTask {
duke@435 695 public:
duke@435 696 RefProcPhase3Task(ReferenceProcessor& ref_processor,
duke@435 697 DiscoveredList refs_lists[],
duke@435 698 bool clear_referent,
duke@435 699 bool marks_oops_alive)
duke@435 700 : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
duke@435 701 _clear_referent(clear_referent)
duke@435 702 { }
duke@435 703 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
duke@435 704 OopClosure& keep_alive,
duke@435 705 VoidClosure& complete_gc)
duke@435 706 {
jmasa@2188 707 // Don't use "refs_list_index" calculated in this way because
jmasa@2188 708 // balance_queues() has moved the Ref's into the first n queues.
jmasa@2188 709 // Thread* thr = Thread::current();
jmasa@2188 710 // int refs_list_index = ((WorkerThread*)thr)->id();
jmasa@2188 711 // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
duke@435 712 _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
duke@435 713 &is_alive, &keep_alive, &complete_gc);
duke@435 714 }
duke@435 715 private:
duke@435 716 bool _clear_referent;
duke@435 717 };
duke@435 718
johnc@3175 719 void ReferenceProcessor::set_discovered(oop ref, oop value) {
johnc@3175 720 if (_discovered_list_needs_barrier) {
johnc@3175 721 java_lang_ref_Reference::set_discovered(ref, value);
johnc@3175 722 } else {
johnc@3175 723 java_lang_ref_Reference::set_discovered_raw(ref, value);
johnc@3175 724 }
johnc@3175 725 }
johnc@3175 726
duke@435 727 // Balances reference queues.
jmasa@2188 728 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
jmasa@2188 729 // queues[0, 1, ..., _num_q-1] because only the first _num_q
jmasa@2188 730 // corresponding to the active workers will be processed.
duke@435 731 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
duke@435 732 {
duke@435 733 // calculate total length
duke@435 734 size_t total_refs = 0;
jmasa@2188 735 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 736 gclog_or_tty->print_cr("\nBalance ref_lists ");
jmasa@2188 737 }
jmasa@2188 738
jmasa@2188 739 for (int i = 0; i < _max_num_q; ++i) {
duke@435 740 total_refs += ref_lists[i].length();
jmasa@2188 741 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 742 gclog_or_tty->print("%d ", ref_lists[i].length());
jmasa@2188 743 }
jmasa@2188 744 }
jmasa@2188 745 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 746 gclog_or_tty->print_cr(" = " SIZE_FORMAT, total_refs);
duke@435 747 }
duke@435 748 size_t avg_refs = total_refs / _num_q + 1;
duke@435 749 int to_idx = 0;
jmasa@2188 750 for (int from_idx = 0; from_idx < _max_num_q; from_idx++) {
jmasa@2188 751 bool move_all = false;
jmasa@2188 752 if (from_idx >= _num_q) {
jmasa@2188 753 move_all = ref_lists[from_idx].length() > 0;
jmasa@2188 754 }
jmasa@2188 755 while ((ref_lists[from_idx].length() > avg_refs) ||
jmasa@2188 756 move_all) {
duke@435 757 assert(to_idx < _num_q, "Sanity Check!");
duke@435 758 if (ref_lists[to_idx].length() < avg_refs) {
duke@435 759 // move superfluous refs
jmasa@2188 760 size_t refs_to_move;
jmasa@2188 761 // Move all the Ref's if the from queue will not be processed.
jmasa@2188 762 if (move_all) {
jmasa@2188 763 refs_to_move = MIN2(ref_lists[from_idx].length(),
jmasa@2188 764 avg_refs - ref_lists[to_idx].length());
jmasa@2188 765 } else {
jmasa@2188 766 refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
jmasa@2188 767 avg_refs - ref_lists[to_idx].length());
jmasa@2188 768 }
stefank@3115 769
stefank@3115 770 assert(refs_to_move > 0, "otherwise the code below will fail");
stefank@3115 771
duke@435 772 oop move_head = ref_lists[from_idx].head();
duke@435 773 oop move_tail = move_head;
duke@435 774 oop new_head = move_head;
duke@435 775 // find an element to split the list on
duke@435 776 for (size_t j = 0; j < refs_to_move; ++j) {
duke@435 777 move_tail = new_head;
coleenp@548 778 new_head = java_lang_ref_Reference::discovered(new_head);
duke@435 779 }
stefank@3115 780
stefank@3115 781 // Add the chain to the to list.
stefank@3115 782 if (ref_lists[to_idx].head() == NULL) {
stefank@3115 783 // to list is empty. Make a loop at the end.
johnc@3175 784 set_discovered(move_tail, move_tail);
stefank@3115 785 } else {
johnc@3175 786 set_discovered(move_tail, ref_lists[to_idx].head());
stefank@3115 787 }
duke@435 788 ref_lists[to_idx].set_head(move_head);
ysr@887 789 ref_lists[to_idx].inc_length(refs_to_move);
stefank@3115 790
stefank@3115 791 // Remove the chain from the from list.
stefank@3115 792 if (move_tail == new_head) {
stefank@3115 793 // We found the end of the from list.
stefank@3115 794 ref_lists[from_idx].set_head(NULL);
stefank@3115 795 } else {
stefank@3115 796 ref_lists[from_idx].set_head(new_head);
stefank@3115 797 }
ysr@887 798 ref_lists[from_idx].dec_length(refs_to_move);
jmasa@2188 799 if (ref_lists[from_idx].length() == 0) {
jmasa@2188 800 break;
jmasa@2188 801 }
duke@435 802 } else {
jmasa@2188 803 to_idx = (to_idx + 1) % _num_q;
duke@435 804 }
duke@435 805 }
duke@435 806 }
jmasa@2188 807 #ifdef ASSERT
jmasa@2188 808 size_t balanced_total_refs = 0;
jmasa@2188 809 for (int i = 0; i < _max_num_q; ++i) {
jmasa@2188 810 balanced_total_refs += ref_lists[i].length();
jmasa@2188 811 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 812 gclog_or_tty->print("%d ", ref_lists[i].length());
jmasa@2188 813 }
jmasa@2188 814 }
jmasa@2188 815 if (TraceReferenceGC && PrintGCDetails) {
jmasa@2188 816 gclog_or_tty->print_cr(" = " SIZE_FORMAT, balanced_total_refs);
jmasa@2188 817 gclog_or_tty->flush();
jmasa@2188 818 }
jmasa@2188 819 assert(total_refs == balanced_total_refs, "Balancing was incomplete");
jmasa@2188 820 #endif
jmasa@2188 821 }
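// Editor's worked example of balance_queues(): with _num_q == 2,
// _max_num_q == 4 and list lengths [8, 0, 4, 0], total_refs == 12 and
// avg_refs == 12/2 + 1 == 7. Queue 0 moves one ref to queue 1, giving
// [7, 1, 4, 0]; queue 2 is at or beyond _num_q, so move_all drains all
// four of its refs into queue 1, giving [7, 5, 0, 0]. Only queues
// 0.._num_q-1 now hold refs, as the active workers require.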
jmasa@2188 822
jmasa@2188 823 void ReferenceProcessor::balance_all_queues() {
jmasa@2188 824 balance_queues(_discoveredSoftRefs);
jmasa@2188 825 balance_queues(_discoveredWeakRefs);
jmasa@2188 826 balance_queues(_discoveredFinalRefs);
jmasa@2188 827 balance_queues(_discoveredPhantomRefs);
duke@435 828 }
duke@435 829
duke@435 830 void
duke@435 831 ReferenceProcessor::process_discovered_reflist(
duke@435 832 DiscoveredList refs_lists[],
duke@435 833 ReferencePolicy* policy,
duke@435 834 bool clear_referent,
duke@435 835 BoolObjectClosure* is_alive,
duke@435 836 OopClosure* keep_alive,
duke@435 837 VoidClosure* complete_gc,
duke@435 838 AbstractRefProcTaskExecutor* task_executor)
duke@435 839 {
jmasa@2188 840 bool mt_processing = task_executor != NULL && _processing_is_mt;
jmasa@2188 841 // If discovery used MT and a dynamic number of GC threads, then
jmasa@2188 842 // the queues must be balanced for correctness if fewer than the
jmasa@2188 843 // maximum number of queues were used. The number of queues used
jmasa@2188 844 // during discovery may be different from the number to be used
jmasa@2188 845 // for processing, so don't depend on _num_q < _max_num_q as part
jmasa@2188 846 // of the test.
jmasa@2188 847 bool must_balance = _discovery_is_mt;
jmasa@2188 848
jmasa@2188 849 if ((mt_processing && ParallelRefProcBalancingEnabled) ||
jmasa@2188 850 must_balance) {
duke@435 851 balance_queues(refs_lists);
duke@435 852 }
duke@435 853 if (PrintReferenceGC && PrintGCDetails) {
duke@435 854 size_t total = 0;
ysr@2651 855 for (int i = 0; i < _max_num_q; ++i) {
duke@435 856 total += refs_lists[i].length();
duke@435 857 }
duke@435 858 gclog_or_tty->print(", %u refs", total);
duke@435 859 }
duke@435 860
duke@435 861 // Phase 1 (soft refs only):
duke@435 862 // . Traverse the list and remove any SoftReferences whose
duke@435 863 // referents are not alive, but that should be kept alive for
duke@435 864 // policy reasons. Keep alive the transitive closure of all
duke@435 865 // such referents.
duke@435 866 if (policy != NULL) {
jmasa@2188 867 if (mt_processing) {
duke@435 868 RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/);
duke@435 869 task_executor->execute(phase1);
duke@435 870 } else {
ysr@2651 871 for (int i = 0; i < _max_num_q; i++) {
duke@435 872 process_phase1(refs_lists[i], policy,
duke@435 873 is_alive, keep_alive, complete_gc);
duke@435 874 }
duke@435 875 }
duke@435 876 } else { // policy == NULL
duke@435 877 assert(refs_lists != _discoveredSoftRefs,
duke@435 878 "Policy must be specified for soft references.");
duke@435 879 }
duke@435 880
duke@435 881 // Phase 2:
duke@435 882 // . Traverse the list and remove any refs whose referents are alive.
jmasa@2188 883 if (mt_processing) {
duke@435 884 RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/);
duke@435 885 task_executor->execute(phase2);
duke@435 886 } else {
ysr@2651 887 for (int i = 0; i < _max_num_q; i++) {
duke@435 888 process_phase2(refs_lists[i], is_alive, keep_alive, complete_gc);
duke@435 889 }
duke@435 890 }
duke@435 891
duke@435 892 // Phase 3:
duke@435 893 // . Traverse the list and process referents as appropriate.
jmasa@2188 894 if (mt_processing) {
duke@435 895 RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/);
duke@435 896 task_executor->execute(phase3);
duke@435 897 } else {
ysr@2651 898 for (int i = 0; i < _max_num_q; i++) {
duke@435 899 process_phase3(refs_lists[i], clear_referent,
duke@435 900 is_alive, keep_alive, complete_gc);
duke@435 901 }
duke@435 902 }
duke@435 903 }
duke@435 904
duke@435 905 void ReferenceProcessor::clean_up_discovered_references() {
duke@435 906 // loop over the lists
johnc@3175 907 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
ysr@2651 908 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
duke@435 909 gclog_or_tty->print_cr(
duke@435 910 "\nScrubbing %s discovered list of Null referents",
duke@435 911 list_name(i));
duke@435 912 }
duke@435 913 clean_up_discovered_reflist(_discoveredSoftRefs[i]);
duke@435 914 }
duke@435 915 }
duke@435 916
duke@435 917 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
duke@435 918 assert(!discovery_is_atomic(), "Else why call this method?");
duke@435 919 DiscoveredListIterator iter(refs_list, NULL, NULL);
duke@435 920 while (iter.has_next()) {
duke@435 921 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 922 oop next = java_lang_ref_Reference::next(iter.obj());
coleenp@548 923 assert(next->is_oop_or_null(), "bad next field");
duke@435 924 // If referent has been cleared or Reference is not active,
duke@435 925 // drop it.
coleenp@548 926 if (iter.referent() == NULL || next != NULL) {
duke@435 927 debug_only(
duke@435 928 if (PrintGCDetails && TraceReferenceGC) {
duke@435 929 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
duke@435 930 INTPTR_FORMAT " with next field: " INTPTR_FORMAT
duke@435 931 " and referent: " INTPTR_FORMAT,
coleenp@548 932 iter.obj(), next, iter.referent());
duke@435 933 }
duke@435 934 )
duke@435 935 // Remove Reference object from list
duke@435 936 iter.remove();
ysr@887 937 iter.move_to_next();
duke@435 938 } else {
duke@435 939 iter.next();
duke@435 940 }
duke@435 941 }
duke@435 942 NOT_PRODUCT(
duke@435 943 if (PrintGCDetails && TraceReferenceGC) {
duke@435 944 gclog_or_tty->print(
duke@435 945 " Removed %d Refs with NULL referents out of %d discovered Refs",
duke@435 946 iter.removed(), iter.processed());
duke@435 947 }
duke@435 948 )
duke@435 949 }
duke@435 950
duke@435 951 inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt) {
duke@435 952 int id = 0;
duke@435 953 // Determine the queue index to use for this object.
duke@435 954 if (_discovery_is_mt) {
duke@435 955 // During a multi-threaded discovery phase,
duke@435 956 // each thread saves to its "own" list.
duke@435 957 Thread* thr = Thread::current();
johnc@2316 958 id = thr->as_Worker_thread()->id();
duke@435 959 } else {
duke@435 960 // single-threaded discovery, we save in round-robin
duke@435 961 // fashion to each of the lists.
duke@435 962 if (_processing_is_mt) {
duke@435 963 id = next_id();
duke@435 964 }
duke@435 965 }
jmasa@2188 966 assert(0 <= id && id < _max_num_q, "Id is out-of-bounds (call Freud?)");
duke@435 967
duke@435 968 // Get the discovered queue to which we will add
duke@435 969 DiscoveredList* list = NULL;
duke@435 970 switch (rt) {
duke@435 971 case REF_OTHER:
duke@435 972 // Unknown reference type, no special treatment
duke@435 973 break;
duke@435 974 case REF_SOFT:
duke@435 975 list = &_discoveredSoftRefs[id];
duke@435 976 break;
duke@435 977 case REF_WEAK:
duke@435 978 list = &_discoveredWeakRefs[id];
duke@435 979 break;
duke@435 980 case REF_FINAL:
duke@435 981 list = &_discoveredFinalRefs[id];
duke@435 982 break;
duke@435 983 case REF_PHANTOM:
duke@435 984 list = &_discoveredPhantomRefs[id];
duke@435 985 break;
duke@435 986 case REF_NONE:
duke@435 987 // we should not reach here if we are an instanceRefKlass
duke@435 988 default:
duke@435 989 ShouldNotReachHere();
duke@435 990 }
jmasa@2188 991 if (TraceReferenceGC && PrintGCDetails) {
johnc@2316 992 gclog_or_tty->print_cr("Thread %d gets list " INTPTR_FORMAT, id, list);
jmasa@2188 993 }
duke@435 994 return list;
duke@435 995 }
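// Editor's example: with _max_num_q == 4, a REF_WEAK object discovered by
// worker thread id == 2 lands on &_discoveredWeakRefs[2], which is flat
// entry 1 * _max_num_q + 2 == 6 of the array allocated in the constructor.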
duke@435 996
coleenp@548 997 inline void
coleenp@548 998 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
coleenp@548 999 oop obj,
coleenp@548 1000 HeapWord* discovered_addr) {
duke@435 1001 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
duke@435 1002 // First we must make sure this object is only enqueued once. CAS a non-NULL
duke@435 1003 // value into the discovered field at discovered_addr.
ysr@777 1004 oop current_head = refs_list.head();
stefank@3115 1005 // The last ref must have its discovered field pointing to itself.
stefank@3115 1006 oop next_discovered = (current_head != NULL) ? current_head : obj;
ysr@777 1007
ysr@1280 1008 // Note: In the case of G1, this specific pre-barrier is strictly
ysr@777 1009 // not necessary because the only case we are interested in
ysr@1280 1010 // here is when *discovered_addr is NULL (see the CAS further below),
ysr@1280 1011 // so this will expand to nothing. As a result, we have manually
ysr@1280 1012 // elided it for G1, but left the test in for some future
ysr@3117 1013 // collector that might need a pre-barrier here, e.g.:
ysr@3117 1014 // _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
ysr@3117 1015 assert(!_discovered_list_needs_barrier || UseG1GC,
ysr@3117 1016 "Need to check non-G1 collector: "
ysr@3117 1017 "may need a pre-write-barrier for CAS from NULL below");
stefank@3115 1018 oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
coleenp@548 1019 NULL);
duke@435 1020 if (retest == NULL) {
duke@435 1021 // This thread just won the right to enqueue the object.
ysr@3117 1022 // We have separate lists for enqueueing, so no synchronization
duke@435 1023 // is necessary.
coleenp@548 1024 refs_list.set_head(obj);
ysr@887 1025 refs_list.inc_length(1);
ysr@777 1026 if (_discovered_list_needs_barrier) {
stefank@3115 1027 _bs->write_ref_field((void*)discovered_addr, next_discovered);
ysr@777 1028 }
johnc@2316 1029
johnc@2316 1030 if (TraceReferenceGC) {
ysr@3117 1031 gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
johnc@2316 1032 obj, obj->blueprint()->internal_name());
johnc@2316 1033 }
duke@435 1034 } else {
duke@435 1035 // If retest was non NULL, another thread beat us to it:
duke@435 1036 // The reference has already been discovered...
duke@435 1037 if (TraceReferenceGC) {
ysr@3117 1038 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
duke@435 1039 obj, obj->blueprint()->internal_name());
duke@435 1040 }
duke@435 1041 }
duke@435 1042 }
duke@435 1043
ysr@2337 1044 #ifndef PRODUCT
ysr@2337 1045 // Non-atomic (i.e. concurrent) discovery might allow us
ysr@2337 1046 // to observe j.l.References with NULL referents, being those
ysr@2337 1047 // cleared concurrently by mutators during (or after) discovery.
ysr@2337 1048 void ReferenceProcessor::verify_referent(oop obj) {
ysr@2337 1049 bool da = discovery_is_atomic();
ysr@2337 1050 oop referent = java_lang_ref_Reference::referent(obj);
ysr@2337 1051 assert(da ? referent->is_oop() : referent->is_oop_or_null(),
ysr@2337 1052 err_msg("Bad referent " INTPTR_FORMAT " found in Reference "
ysr@2337 1053 INTPTR_FORMAT " during %satomic discovery ",
ysr@2337 1054 (intptr_t)referent, (intptr_t)obj, da ? "" : "non-"));
ysr@2337 1055 }
ysr@2337 1056 #endif
ysr@2337 1057
duke@435 1058 // We mention two of several possible choices here:
duke@435 1059 // #0: if the reference object is not in the "originating generation"
duke@435 1060 // (or part of the heap being collected, indicated by our "span"),
duke@435 1061 // we don't treat it specially (i.e. we scan it as we would
duke@435 1062 // a normal oop, treating its references as strong references).
ysr@3117 1063 // This means that references can't be discovered unless their
duke@435 1064 // referent is also in the same span. This is the simplest,
duke@435 1065 // most "local" and most conservative approach, albeit one
duke@435 1066 // that may cause weak references to be enqueued least promptly.
duke@435 1067 // We call this choice the "ReferenceBasedDiscovery" policy.
duke@435 1068 // #1: the reference object may be in any generation (span), but if
duke@435 1069 // the referent is in the generation (span) being currently collected
duke@435 1070 // then we can discover the reference object, provided
duke@435 1071 // the object has not already been discovered by
duke@435 1072 // a different concurrently running collector (as may be the
duke@435 1073 // case, for instance, if the reference object is in CMS and
duke@435 1074 // the referent in DefNewGeneration), and provided the processing
duke@435 1075 // of this reference object by the current collector will
duke@435 1076 // appear atomic to every other collector in the system.
duke@435 1077 // (Thus, for instance, a concurrent collector may not
duke@435 1078 // discover references in other generations even if the
duke@435 1079 // referent is in its own generation). This policy may,
duke@435 1080 // in certain cases, enqueue references somewhat sooner than
duke@435 1081 // might Policy #0 above, but at marginally increased cost
duke@435 1082 // and complexity in processing these references.
duke@435 1083 // We call this choice the "RefeferentBasedDiscovery" policy.
duke@435 1084 bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
ysr@3117 1085 // Make sure we are discovering refs (rather than processing discovered refs).
duke@435 1086 if (!_discovering_refs || !RegisterReferences) {
duke@435 1087 return false;
duke@435 1088 }
ysr@3117 1089 // We only discover active references.
coleenp@548 1090 oop next = java_lang_ref_Reference::next(obj);
ysr@3117 1091 if (next != NULL) { // Ref is no longer active
duke@435 1092 return false;
duke@435 1093 }
duke@435 1094
duke@435 1095 HeapWord* obj_addr = (HeapWord*)obj;
duke@435 1096 if (RefDiscoveryPolicy == ReferenceBasedDiscovery &&
duke@435 1097 !_span.contains(obj_addr)) {
duke@435 1098 // Reference is not in the originating generation;
duke@435 1099 // don't treat it specially (i.e. we want to scan it as a normal
duke@435 1100 // object with strong references).
duke@435 1101 return false;
duke@435 1102 }
duke@435 1103
ysr@3117 1104 // We only discover references whose referents are not (yet)
ysr@3117 1105 // known to be strongly reachable.
duke@435 1106 if (is_alive_non_header() != NULL) {
ysr@2337 1107 verify_referent(obj);
ysr@2337 1108 if (is_alive_non_header()->do_object_b(java_lang_ref_Reference::referent(obj))) {
duke@435 1109 return false; // referent is reachable
duke@435 1110 }
duke@435 1111 }
ysr@888 1112 if (rt == REF_SOFT) {
ysr@888 1113 // For soft refs we can decide now if these are not
ysr@888 1114 // current candidates for clearing, in which case we
ysr@888 1115 // can mark through them now, rather than delaying that
ysr@888 1116 // to the reference-processing phase. Since all current
ysr@888 1117 // time-stamp policies advance the soft-ref clock only
ysr@888 1118 // at a major collection cycle, this is always currently
ysr@888 1119 // accurate.
ysr@888 1120 if (!_current_soft_ref_policy->should_clear_reference(obj)) {
ysr@888 1121 return false;
ysr@888 1122 }
ysr@888 1123 }
duke@435 1124
johnc@3175 1125 ResourceMark rm; // Needed for tracing.
johnc@3175 1126
ysr@777 1127 HeapWord* const discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
ysr@777 1128 const oop discovered = java_lang_ref_Reference::discovered(obj);
coleenp@548 1129 assert(discovered->is_oop_or_null(), "bad discovered field");
coleenp@548 1130 if (discovered != NULL) {
duke@435 1131 // The reference has already been discovered...
duke@435 1132 if (TraceReferenceGC) {
ysr@3117 1133 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
coleenp@548 1134 obj, obj->blueprint()->internal_name());
duke@435 1135 }
duke@435 1136 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
duke@435 1137 // This assumes that an object is not processed twice;
duke@435 1138 // if it has already been discovered it must be on another
duke@435 1139 // generation's discovered list; so we won't discover it.
duke@435 1140 return false;
duke@435 1141 } else {
duke@435 1142 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery,
duke@435 1143 "Unrecognized policy");
duke@435 1144 // Check assumption that an object is not potentially
duke@435 1145 // discovered twice except by concurrent collectors that potentially
duke@435 1146 // trace the same Reference object twice.
johnc@2316 1147 assert(UseConcMarkSweepGC || UseG1GC,
johnc@2316 1148 "Only possible with a concurrent marking collector");
duke@435 1149 return true;
duke@435 1150 }
duke@435 1151 }
duke@435 1152
duke@435 1153 if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
ysr@2337 1154 verify_referent(obj);
ysr@3117 1155 // Discover if and only if EITHER:
ysr@3117 1156 // .. reference is in our span, OR
ysr@3117 1157 // .. we are an atomic collector and referent is in our span
duke@435 1158 if (_span.contains(obj_addr) ||
ysr@2337 1159 (discovery_is_atomic() &&
ysr@2337 1160 _span.contains(java_lang_ref_Reference::referent(obj)))) {
duke@435 1161 // should_enqueue = true;
duke@435 1162 } else {
duke@435 1163 return false;
duke@435 1164 }
duke@435 1165 } else {
duke@435 1166 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
duke@435 1167 _span.contains(obj_addr), "code inconsistency");
duke@435 1168 }
duke@435 1169
duke@435 1170 // Get the right type of discovered queue head.
duke@435 1171 DiscoveredList* list = get_discovered_list(rt);
duke@435 1172 if (list == NULL) {
duke@435 1173 return false; // nothing special needs to be done
duke@435 1174 }
duke@435 1175
duke@435 1176 if (_discovery_is_mt) {
duke@435 1177 add_to_discovered_list_mt(*list, obj, discovered_addr);
duke@435 1178 } else {
ysr@777 1179 // If "_discovered_list_needs_barrier", we do write barriers when
ysr@777 1180 // updating the discovered reference list. Otherwise, we do a raw store
ysr@777 1181 // here: the field will be visited later when processing the discovered
ysr@777 1182 // references.
ysr@777 1183 oop current_head = list->head();
stefank@3115 1184 // The last ref must have its discovered field pointing to itself.
stefank@3115 1185 oop next_discovered = (current_head != NULL) ? current_head : obj;
stefank@3115 1186
ysr@777 1187 // As in the MT case above, since we are over-writing a NULL
ysr@777 1188 // pre-value, we can safely elide the G1 pre-barrier here,
ysr@3117 1189 // e.g. _bs->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
ysr@777 1190 assert(discovered == NULL, "control point invariant");
ysr@3117 1191 assert(!_discovered_list_needs_barrier || UseG1GC,
ysr@3117 1192 "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
stefank@3115 1193 oop_store_raw(discovered_addr, next_discovered);
ysr@777 1194 if (_discovered_list_needs_barrier) {
stefank@3115 1195 _bs->write_ref_field((void*)discovered_addr, next_discovered);
ysr@777 1196 }
duke@435 1197 list->set_head(obj);
ysr@887 1198 list->inc_length(1);
duke@435 1199
johnc@2316 1200 if (TraceReferenceGC) {
ysr@3117 1201 gclog_or_tty->print_cr("Discovered reference (" INTPTR_FORMAT ": %s)",
johnc@2316 1202 obj, obj->blueprint()->internal_name());
duke@435 1203 }
duke@435 1204 }
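// Sketch of the resulting (non-MT) list after the push above: obj becomes
// the new head, linked through the discovered field, and the tail marks
// the end of the list by pointing to itself:
//
//   list->head() -> obj -> old head -> ... -> tail (tail.discovered == tail)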
ysr@3117 1205 assert(obj->is_oop(), "Discovered a bad reference");
ysr@2337 1206 verify_referent(obj);
duke@435 1207 return true;
duke@435 1208 }
duke@435 1209
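// Hypothetical caller sketch (control flow is illustrative, not from this
// file): a collector's field-scanning code typically offers each Reference
// object for discovery before treating its referent as a strong field:
//
//   if (rp->discovery_enabled() &&
//       rp->discover_reference(obj, ref_type)) {
//     return;  // discovered: the referent is dealt with at processing time
//   }
//   // not discovered: scan the referent like any other field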
duke@435 1210 // Preclean the discovered references by removing those
duke@435 1211 // whose referents are alive, and by marking from those that
duke@435 1212 // are not active. These lists can be handled here
duke@435 1213 // in any order and, indeed, concurrently.
duke@435 1214 void ReferenceProcessor::preclean_discovered_references(
duke@435 1215 BoolObjectClosure* is_alive,
duke@435 1216 OopClosure* keep_alive,
duke@435 1217 VoidClosure* complete_gc,
jmasa@1625 1218 YieldClosure* yield,
jmasa@1625 1219 bool should_unload_classes) {
duke@435 1220
duke@435 1221 NOT_PRODUCT(verify_ok_to_handle_reflists());
duke@435 1222
jmasa@1370 1223 #ifdef ASSERT
jmasa@1370 1224 bool must_remember_klasses = (ClassUnloading && !UseConcMarkSweepGC) ||
jmasa@1625 1225 (CMSClassUnloadingEnabled && UseConcMarkSweepGC) ||
jmasa@1625 1226 (ExplicitGCInvokesConcurrentAndUnloadsClasses &&
jmasa@1625 1227 UseConcMarkSweepGC && should_unload_classes);
jmasa@1370 1228 RememberKlassesChecker mx(must_remember_klasses);
jmasa@1370 1229 #endif
duke@435 1230 // Soft references
duke@435 1231 {
duke@435 1232 TraceTime tt("Preclean SoftReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1233 false, gclog_or_tty);
jmasa@2188 1234 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1235 if (yield->should_return()) {
ysr@887 1236 return;
ysr@887 1237 }
duke@435 1238 preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
duke@435 1239 keep_alive, complete_gc, yield);
duke@435 1240 }
duke@435 1241 }
duke@435 1242
duke@435 1243 // Weak references
duke@435 1244 {
duke@435 1245 TraceTime tt("Preclean WeakReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1246 false, gclog_or_tty);
ysr@2651 1247 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1248 if (yield->should_return()) {
ysr@887 1249 return;
ysr@887 1250 }
duke@435 1251 preclean_discovered_reflist(_discoveredWeakRefs[i], is_alive,
duke@435 1252 keep_alive, complete_gc, yield);
duke@435 1253 }
duke@435 1254 }
duke@435 1255
duke@435 1256 // Final references
duke@435 1257 {
duke@435 1258 TraceTime tt("Preclean FinalReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1259 false, gclog_or_tty);
ysr@2651 1260 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1261 if (yield->should_return()) {
ysr@887 1262 return;
ysr@887 1263 }
duke@435 1264 preclean_discovered_reflist(_discoveredFinalRefs[i], is_alive,
duke@435 1265 keep_alive, complete_gc, yield);
duke@435 1266 }
duke@435 1267 }
duke@435 1268
duke@435 1269 // Phantom references
duke@435 1270 {
duke@435 1271 TraceTime tt("Preclean PhantomReferences", PrintGCDetails && PrintReferenceGC,
duke@435 1272 false, gclog_or_tty);
ysr@2651 1273 for (int i = 0; i < _max_num_q; i++) {
ysr@887 1274 if (yield->should_return()) {
ysr@887 1275 return;
ysr@887 1276 }
duke@435 1277 preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
duke@435 1278 keep_alive, complete_gc, yield);
duke@435 1279 }
duke@435 1280 }
duke@435 1281 }
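// Hypothetical caller sketch (closure names are illustrative, not from this
// file): a concurrent collector can preclean while yielding cooperatively:
//
//   MyIsAliveClosure   is_alive;     // BoolObjectClosure: is object reachable?
//   MyKeepAliveClosure keep_alive;   // OopClosure: mark/trace a surviving oop
//   MyDrainClosure     complete_gc;  // VoidClosure: drain the marking stack
//   MyYieldClosure     yield;        // YieldClosure: should we yield now?
//   rp->preclean_discovered_references(&is_alive, &keep_alive, &complete_gc,
//                                      &yield, false /* should_unload_classes */);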
duke@435 1282
duke@435 1283 // Walk the given discovered ref list, and remove all reference objects
duke@435 1284 // whose referents are still alive, whose referents are NULL, or which
ysr@887 1285 // are not active (i.e. have a non-NULL next field). NOTE: When we are
ysr@887 1286 // thus precleaning the ref lists (which happens single-threaded today),
ysr@887 1287 // we do not disable refs discovery to honour the correct semantics of
ysr@887 1288 // java.lang.Reference. As a result, we need to be careful below
ysr@887 1289 // that ref removal steps interleave safely with ref discovery steps
ysr@887 1290 // (in this thread).
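// (For example, a Reference that has already been enqueued has a non-NULL
// next field, is therefore inactive, and so is dropped from the list here.)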
coleenp@548 1291 void
coleenp@548 1292 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
coleenp@548 1293 BoolObjectClosure* is_alive,
coleenp@548 1294 OopClosure* keep_alive,
coleenp@548 1295 VoidClosure* complete_gc,
coleenp@548 1296 YieldClosure* yield) {
duke@435 1297 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
duke@435 1298 while (iter.has_next()) {
duke@435 1299 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
coleenp@548 1300 oop obj = iter.obj();
coleenp@548 1301 oop next = java_lang_ref_Reference::next(obj);
duke@435 1302 if (iter.referent() == NULL || iter.is_referent_alive() ||
coleenp@548 1303 next != NULL) {
duke@435 1304 // The referent has been cleared, or is alive, or the Reference is not
duke@435 1305 // active; we need to trace and mark its cohort.
duke@435 1306 if (TraceReferenceGC) {
duke@435 1307 gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
duke@435 1308 iter.obj(), iter.obj()->blueprint()->internal_name());
duke@435 1309 }
duke@435 1310 // Remove Reference object from list
duke@435 1311 iter.remove();
duke@435 1312 // Keep alive its cohort.
duke@435 1313 iter.make_referent_alive();
coleenp@548 1314 if (UseCompressedOops) {
coleenp@548 1315 narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 1316 keep_alive->do_oop(next_addr);
coleenp@548 1317 } else {
coleenp@548 1318 oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
coleenp@548 1319 keep_alive->do_oop(next_addr);
coleenp@548 1320 }
ysr@887 1321 iter.move_to_next();
duke@435 1322 } else {
duke@435 1323 iter.next();
duke@435 1324 }
duke@435 1325 }
duke@435 1326 // Close the reachable set
duke@435 1327 complete_gc->do_void();
duke@435 1328
duke@435 1329 NOT_PRODUCT(
ysr@2651 1330 if (PrintGCDetails && PrintReferenceGC && (iter.processed() > 0)) {
jmasa@2188 1331 gclog_or_tty->print_cr(" Dropped %d Refs out of %d "
jmasa@2188 1332 "Refs in discovered list " INTPTR_FORMAT,
jmasa@2188 1333 iter.removed(), iter.processed(), (address)refs_list.head());
duke@435 1334 }
duke@435 1335 )
duke@435 1336 }
duke@435 1337
duke@435 1338 const char* ReferenceProcessor::list_name(int i) {
johnc@3175 1339 assert(i >= 0 && i < _max_num_q * number_of_subclasses_of_ref(),
johnc@3175 1340 "Out of bounds index");
johnc@3175 1341
jmasa@2188 1342 int j = i / _max_num_q;
duke@435 1343 switch (j) {
duke@435 1344 case 0: return "SoftRef";
duke@435 1345 case 1: return "WeakRef";
duke@435 1346 case 2: return "FinalRef";
duke@435 1347 case 3: return "PhantomRef";
duke@435 1348 }
duke@435 1349 ShouldNotReachHere();
duke@435 1350 return NULL;
duke@435 1351 }
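// For example, with _max_num_q == 8, indices 0..7 name the "SoftRef" lists,
// 8..15 the "WeakRef" lists, 16..23 the "FinalRef" lists, and 24..31 the
// "PhantomRef" lists.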
duke@435 1352
duke@435 1353 #ifndef PRODUCT
duke@435 1354 void ReferenceProcessor::verify_ok_to_handle_reflists() {
duke@435 1355 // empty for now
duke@435 1356 }
duke@435 1357 #endif
duke@435 1358
duke@435 1359 #ifndef PRODUCT
duke@435 1360 void ReferenceProcessor::clear_discovered_references() {
duke@435 1361 guarantee(!_discovering_refs, "Discovering refs?");
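// This single loop clears the lists of all four reference kinds: it relies
// on the Weak/Final/Phantom lists being laid out contiguously in memory
// immediately after _discoveredSoftRefs.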
johnc@3175 1362 for (int i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
stefank@3115 1363 clear_discovered_references(_discoveredSoftRefs[i]);
duke@435 1364 }
duke@435 1365 }
stefank@3115 1366
duke@435 1367 #endif // PRODUCT
