src/share/vm/memory/referenceProcessor.cpp

changeset 6397:d60ecdb2773e
parent    6395:a258f8cb530f
child     6680:78bbf4d43a14

comparison: 6396:f99e331f6ef6 vs. 6397:d60ecdb2773e
--- a/src/share/vm/memory/referenceProcessor.cpp
+++ b/src/share/vm/memory/referenceProcessor.cpp
@@ -93,15 +93,15 @@
                                        uint mt_processing_degree,
                                        bool mt_discovery,
                                        uint mt_discovery_degree,
                                        bool atomic_discovery,
                                        BoolObjectClosure* is_alive_non_header,
-                                       bool discovered_list_needs_barrier) :
+                                       bool discovered_list_needs_post_barrier) :
   _discovering_refs(false),
   _enqueuing_is_done(false),
   _is_alive_non_header(is_alive_non_header),
-  _discovered_list_needs_barrier(discovered_list_needs_barrier),
+  _discovered_list_needs_post_barrier(discovered_list_needs_post_barrier),
   _processing_is_mt(mt_processing),
   _next_id(0)
 {
   _span = span;
   _discovery_is_atomic = atomic_discovery;
@@ -488,17 +488,17 @@
     // and _prev will be NULL.
     new_next = _prev;
   } else {
     new_next = _next;
   }
-
-  if (UseCompressedOops) {
-    // Remove Reference object from list.
-    oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
-  } else {
-    // Remove Reference object from list.
-    oopDesc::store_heap_oop((oop*)_prev_next, new_next);
-  }
+  // Remove Reference object from discovered list. Note that G1 does not need a
+  // pre-barrier here because we know the Reference has already been found/marked,
+  // that's how it ended up in the discovered list in the first place.
+  oop_store_raw(_prev_next, new_next);
+  if (_discovered_list_needs_post_barrier && _prev_next != _refs_list.adr_head()) {
+    // Needs post-barrier and this is not the list head (which is not on the heap)
+    oopDesc::bs()->write_ref_field(_prev_next, new_next);
+  }
   NOT_PRODUCT(_removed++);
   _refs_list.dec_length(1);
 }
 
@@ -542,11 +542,11 @@
                                    ReferencePolicy* policy,
                                    BoolObjectClosure* is_alive,
                                    OopClosure* keep_alive,
                                    VoidClosure* complete_gc) {
   assert(policy != NULL, "Must have a non-NULL policy");
-  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
+  DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
   // Decide which softly reachable refs should be kept alive.
   while (iter.has_next()) {
     iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
     bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
     if (referent_is_dead &&
@@ -582,11 +582,11 @@
 void
 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
                              BoolObjectClosure* is_alive,
                              OopClosure* keep_alive) {
   assert(discovery_is_atomic(), "Error");
-  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
+  DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
   while (iter.has_next()) {
     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
     DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
     assert(next == NULL, "Should not discover inactive Reference");
     if (iter.is_referent_alive()) {
@@ -619,11 +619,11 @@
 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
                                                   BoolObjectClosure* is_alive,
                                                   OopClosure* keep_alive,
                                                   VoidClosure* complete_gc) {
   assert(!discovery_is_atomic(), "Error");
-  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
+  DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
   while (iter.has_next()) {
     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
     HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
     oop next = java_lang_ref_Reference::next(iter.obj());
     if ((iter.referent() == NULL || iter.is_referent_alive() ||
@@ -662,11 +662,11 @@
                                    bool clear_referent,
                                    BoolObjectClosure* is_alive,
                                    OopClosure* keep_alive,
                                    VoidClosure* complete_gc) {
   ResourceMark rm;
-  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
+  DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
   while (iter.has_next()) {
     iter.update_discovered();
     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
     if (clear_referent) {
       // NULL out referent pointer
@@ -780,12 +780,12 @@
   bool _clear_referent;
 };
 
 void ReferenceProcessor::set_discovered(oop ref, oop value) {
   java_lang_ref_Reference::set_discovered_raw(ref, value);
-  if (_discovered_list_needs_barrier) {
-    oopDesc::bs()->write_ref_field(ref, value);
+  if (_discovered_list_needs_post_barrier) {
+    oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(ref), value);
   }
 }
 
 // Balances reference queues.
 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
@@ -978,11 +978,11 @@
   }
 }
 
 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
   assert(!discovery_is_atomic(), "Else why call this method?");
-  DiscoveredListIterator iter(refs_list, NULL, NULL);
+  DiscoveredListIterator iter(refs_list, NULL, NULL, _discovered_list_needs_post_barrier);
   while (iter.has_next()) {
     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
     oop next = java_lang_ref_Reference::next(iter.obj());
     assert(next->is_oop_or_null(), "bad next field");
     // If referent has been cleared or Reference is not active,
@@ -1074,22 +1074,22 @@
   // here is when *discovered_addr is NULL (see the CAS further below),
   // so this will expand to nothing. As a result, we have manually
   // elided this out for G1, but left in the test for some future
   // collector that might have need for a pre-barrier here, e.g.:-
   // oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
-  assert(!_discovered_list_needs_barrier || UseG1GC,
+  assert(!_discovered_list_needs_post_barrier || UseG1GC,
          "Need to check non-G1 collector: "
          "may need a pre-write-barrier for CAS from NULL below");
   oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
                                                     NULL);
   if (retest == NULL) {
     // This thread just won the right to enqueue the object.
     // We have separate lists for enqueueing, so no synchronization
     // is necessary.
     refs_list.set_head(obj);
     refs_list.inc_length(1);
-    if (_discovered_list_needs_barrier) {
+    if (_discovered_list_needs_post_barrier) {
       oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
     }
 
     if (TraceReferenceGC) {
       gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
@@ -1238,11 +1238,11 @@
   }
 
   if (_discovery_is_mt) {
     add_to_discovered_list_mt(*list, obj, discovered_addr);
   } else {
-    // If "_discovered_list_needs_barrier", we do write barriers when
+    // If "_discovered_list_needs_post_barrier", we do write barriers when
     // updating the discovered reference list. Otherwise, we do a raw store
     // here: the field will be visited later when processing the discovered
     // references.
     oop current_head = list->head();
     // The last ref must have its discovered field pointing to itself.
@@ -1250,14 +1250,14 @@
 
     // As in the case further above, since we are over-writing a NULL
     // pre-value, we can safely elide the pre-barrier here for the case of G1.
     // e.g.:- oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
     assert(discovered == NULL, "control point invariant");
-    assert(!_discovered_list_needs_barrier || UseG1GC,
+    assert(!_discovered_list_needs_post_barrier || UseG1GC,
            "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
     oop_store_raw(discovered_addr, next_discovered);
-    if (_discovered_list_needs_barrier) {
+    if (_discovered_list_needs_post_barrier) {
       oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
     }
     list->set_head(obj);
     list->inc_length(1);
 
@@ -1349,11 +1349,11 @@
 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
                                                 BoolObjectClosure* is_alive,
                                                 OopClosure* keep_alive,
                                                 VoidClosure* complete_gc,
                                                 YieldClosure* yield) {
-  DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
+  DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
   while (iter.has_next()) {
     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
     oop obj = iter.obj();
     oop next = java_lang_ref_Reference::next(obj);
     if (iter.referent() == NULL || iter.is_referent_alive() ||
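
Note: across all hunks the patch applies one update discipline to discovered-list slots: perform a raw reference store, then emit a post-write-barrier only when the collector requested one (_discovered_list_needs_post_barrier) and the slot actually lives on the heap (the list head stored in the ReferenceProcessor does not). The pre-barrier is deliberately absent because, as the new comments note, the old value is either NULL or a Reference that has already been found/marked. The standalone sketch below illustrates that store-then-conditional-post-barrier pattern under simplified, hypothetical types: oop is reduced to void* and FakeBarrierSet stands in for oopDesc::bs(). It is not HotSpot code; it only compiles on its own as an illustration.

    // Standalone illustration (not HotSpot code): store-then-conditional-
    // post-barrier, as used above for discovered-list updates.
    // "oop" and "FakeBarrierSet" are simplified, hypothetical stand-ins.
    #include <cstddef>
    #include <cstdint>

    typedef void* oop;   // stand-in for a heap reference

    struct FakeBarrierSet {
      // Models a card-marking post-barrier: remember that 'field' was
      // updated so a later scan revisits the card covering it.
      uint8_t cards[1024];
      void write_ref_field(void* field, oop /*new_val*/) {
        size_t idx = (reinterpret_cast<uintptr_t>(field) >> 9) % 1024;
        cards[idx] = 1;
      }
    };

    static FakeBarrierSet bs;

    // Raw store, no barrier of any kind; the caller decides whether one
    // is needed afterwards.
    inline void store_raw(oop* field, oop value) { *field = value; }

    // The pattern from the patch: always do the raw store, then emit the
    // post-barrier only when the collector needs one AND the updated slot
    // is a real heap field (the discovered-list head lives outside the
    // heap, so it is skipped).
    void update_discovered_slot(oop* field, oop value,
                                bool needs_post_barrier,
                                bool field_is_on_heap) {
      store_raw(field, value);
      if (needs_post_barrier && field_is_on_heap) {
        bs.write_ref_field(field, value);
      }
    }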
