src/share/vm/memory/referenceProcessor.cpp

changeset 3115: c2bf0120ee5d
parent 2651:    92da084fefc9
child 3117:     eca1193ca245
--- a/src/share/vm/memory/referenceProcessor.cpp	Tue Sep 06 21:03:51 2011 -0700
+++ b/src/share/vm/memory/referenceProcessor.cpp	Thu Sep 01 16:18:17 2011 +0200
@@ -35,7 +35,6 @@
 
 ReferencePolicy* ReferenceProcessor::_always_clear_soft_ref_policy = NULL;
 ReferencePolicy* ReferenceProcessor::_default_soft_ref_policy      = NULL;
-oop              ReferenceProcessor::_sentinelRef = NULL;
 const int        subclasses_of_ref                = REF_PHANTOM - REF_OTHER;
 
 // List of discovered references.
@@ -43,7 +42,7 @@
 public:
   DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
   oop head() const     {
-     return UseCompressedOops ?  oopDesc::decode_heap_oop_not_null(_compressed_head) :
+     return UseCompressedOops ?  oopDesc::decode_heap_oop(_compressed_head) :
                                 _oop_head;
   }
   HeapWord* adr_head() {
@@ -53,12 +52,12 @@
   void   set_head(oop o) {
     if (UseCompressedOops) {
       // Must compress the head ptr.
-      _compressed_head = oopDesc::encode_heap_oop_not_null(o);
+      _compressed_head = oopDesc::encode_heap_oop(o);
     } else {
       _oop_head = o;
     }
   }
-  bool   empty() const          { return head() == ReferenceProcessor::sentinel_ref(); }
+  bool   empty() const          { return head() == NULL; }
   size_t length()               { return _len; }
   void   set_length(size_t len) { _len = len;  }
   void   inc_length(size_t inc) { _len += inc; assert(_len > 0, "Error"); }
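
With the sentinel object gone, a DiscoveredList uses NULL for the empty list and terminates a non-empty list with a self-loop: the last Reference on the list has its discovered field pointing back at itself. That is why the _not_null encode/decode variants above had to go; the head may now legitimately be NULL. A minimal sketch of the convention, with a hypothetical Node standing in for the Reference oop:

    // Sketch only: Node stands in for a java.lang.ref.Reference instance.
    struct Node {
      Node* discovered;  // next element on the list; the tail points to itself
    };

    struct SketchList {
      Node* head = nullptr;                           // NULL head <=> empty list
      bool empty() const { return head == nullptr; }
      void prepend(Node* n) {
        // A node pushed onto an empty list becomes its own successor,
        // establishing the "tail points to itself" invariant.
        n->discovered = (head != nullptr) ? head : n;
        head = n;
      }
    };
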
@@ -76,21 +75,9 @@
 }
 
 void ReferenceProcessor::init_statics() {
-  assert(_sentinelRef == NULL, "should be initialized precisely once");
-  EXCEPTION_MARK;
-  _sentinelRef = instanceKlass::cast(
-                    SystemDictionary::Reference_klass())->
-                      allocate_permanent_instance(THREAD);
-
   // Initialize the master soft ref clock.
   java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());
 
-  if (HAS_PENDING_EXCEPTION) {
-      Handle ex(THREAD, PENDING_EXCEPTION);
-      vm_exit_during_initialization(ex);
-  }
-  assert(_sentinelRef != NULL && _sentinelRef->is_oop(),
-         "Just constructed it!");
   _always_clear_soft_ref_policy = new AlwaysClearPolicy();
   _default_soft_ref_policy      = new COMPILER2_PRESENT(LRUMaxHeapPolicy())
                                       NOT_COMPILER2(LRUCurrentHeapPolicy());
@@ -130,10 +117,9 @@
   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
   _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
-  assert(sentinel_ref() != NULL, "_sentinelRef is NULL");
-  // Initialized all entries to _sentinelRef
+  // Initialize all entries to NULL
   for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
-        _discoveredSoftRefs[i].set_head(sentinel_ref());
+    _discoveredSoftRefs[i].set_head(NULL);
     _discoveredSoftRefs[i].set_length(0);
   }
   // If we do barriers, cache a copy of the barrier set.
@@ -167,10 +153,6 @@
   }
 }
 
-void ReferenceProcessor::oops_do(OopClosure* f) {
-  f->do_oop(adr_sentinel_ref());
-}
-
 void ReferenceProcessor::update_soft_ref_master_clock() {
   // Update (advance) the soft ref master clock field. This must be done
   // after processing the soft ref list.
@@ -283,8 +265,6 @@
   }
 #endif
   JNIHandles::weak_oops_do(is_alive, keep_alive);
-  // Finally remember to keep sentinel around
-  keep_alive->do_oop(adr_sentinel_ref());
   complete_gc->do_void();
 }
 
@@ -334,21 +314,22 @@
     gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
                            INTPTR_FORMAT, (address)refs_list.head());
   }
-  oop obj = refs_list.head();
+
+  oop obj = NULL;
+  oop next = refs_list.head();
   // Walk down the list, copying the discovered field into
-  // the next field and clearing it (except for the last
-  // non-sentinel object which is treated specially to avoid
-  // confusion with an active reference).
-  while (obj != sentinel_ref()) {
+  // the next field and clearing it.
+  while (obj != next) {
+    obj = next;
     assert(obj->is_instanceRef(), "should be reference object");
-    oop next = java_lang_ref_Reference::discovered(obj);
+    next = java_lang_ref_Reference::discovered(obj);
     if (TraceReferenceGC && PrintGCDetails) {
       gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next " INTPTR_FORMAT,
                              obj, next);
     }
     assert(java_lang_ref_Reference::next(obj) == NULL,
            "The reference should not be enqueued");
-    if (next == sentinel_ref()) {  // obj is last
+    if (next == obj) {  // obj is last
       // Swap refs_list into pending_list_addr and
       // set obj's next to what we read from pending_list_addr.
       oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
@@ -366,7 +347,6 @@
       java_lang_ref_Reference::set_next(obj, next);
     }
     java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
-    obj = next;
   }
 }
 
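The rewritten walk needs no sentinel comparison: it advances until the discovered field of the current object points back at the object itself, which is true only of the tail. Reusing the Node sketch from above, the bare traversal idiom is roughly:

    // Visit every node of a self-loop-terminated list. Safe on an empty
    // list: head is nullptr, so obj == next and the loop body never runs.
    void walk(Node* head) {
      Node* obj  = nullptr;
      Node* next = head;
      while (obj != next) {  // only the tail satisfies discovered == self
        obj  = next;
        next = obj->discovered;
        // ... process obj here ...
      }
    }

The real enqueue loop above additionally swaps the list onto the VM's pending list and fixes up each Reference's next field, which this sketch omits.
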
@@ -376,10 +356,9 @@
   RefProcEnqueueTask(ReferenceProcessor& ref_processor,
                      DiscoveredList      discovered_refs[],
                      HeapWord*           pending_list_addr,
-                     oop                 sentinel_ref,
                      int                 n_queues)
     : EnqueueTask(ref_processor, discovered_refs,
-                  pending_list_addr, sentinel_ref, n_queues)
+                  pending_list_addr, n_queues)
   { }
 
   virtual void work(unsigned int work_id) {
@@ -396,7 +375,7 @@
        j++, index += _n_queues) {
       _ref_processor.enqueue_discovered_reflist(
         _refs_lists[index], _pending_list_addr);
-      _refs_lists[index].set_head(_sentinel_ref);
+      _refs_lists[index].set_head(NULL);
       _refs_lists[index].set_length(0);
     }
   }
@@ -408,13 +387,13 @@
   if (_processing_is_mt && task_executor != NULL) {
     // Parallel code
     RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
-                           pending_list_addr, sentinel_ref(), _max_num_q);
+                           pending_list_addr, _max_num_q);
     task_executor->execute(tsk);
   } else {
     // Serial code: call the parent class's implementation
     for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
       enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
-      _discoveredSoftRefs[i].set_head(sentinel_ref());
+      _discoveredSoftRefs[i].set_head(NULL);
       _discoveredSoftRefs[i].set_length(0);
     }
   }
@@ -428,7 +407,7 @@
                               BoolObjectClosure* is_alive);
 
   // End Of List.
-  inline bool has_next() const { return _next != ReferenceProcessor::sentinel_ref(); }
+  inline bool has_next() const { return _ref != NULL; }
 
   // Get oop to the Reference object.
   inline oop obj() const { return _ref; }
@@ -468,9 +447,13 @@
   inline void update_discovered() {
     // First _prev_next ref actually points into DiscoveredList (gross).
     if (UseCompressedOops) {
-      _keep_alive->do_oop((narrowOop*)_prev_next);
+      if (!oopDesc::is_null(*(narrowOop*)_prev_next)) {
+        _keep_alive->do_oop((narrowOop*)_prev_next);
+      }
     } else {
-      _keep_alive->do_oop((oop*)_prev_next);
+      if (!oopDesc::is_null(*(oop*)_prev_next)) {
+        _keep_alive->do_oop((oop*)_prev_next);
+      }
     }
   }
 
@@ -488,6 +471,7 @@
 private:
   DiscoveredList&    _refs_list;
   HeapWord*          _prev_next;
+  oop                _prev;
   oop                _ref;
   HeapWord*          _discovered_addr;
   oop                _next;
@@ -509,6 +493,7 @@
                                                     BoolObjectClosure* is_alive)
   : _refs_list(refs_list),
     _prev_next(refs_list.adr_head()),
+    _prev(NULL),
     _ref(refs_list.head()),
 #ifdef ASSERT
     _first_seen(refs_list.head()),
@@ -517,7 +502,7 @@
     _processed(0),
     _removed(0),
 #endif
-    _next(refs_list.head()),
+    _next(NULL),
     _keep_alive(keep_alive),
     _is_alive(is_alive)
 { }
@@ -544,26 +529,43 @@
 
 inline void DiscoveredListIterator::next() {
   _prev_next = _discovered_addr;
+  _prev = _ref;
   move_to_next();
 }
 
 inline void DiscoveredListIterator::remove() {
   assert(_ref->is_oop(), "Dropping a bad reference");
   oop_store_raw(_discovered_addr, NULL);
+
   // First _prev_next ref actually points into DiscoveredList (gross).
+  oop new_next;
+  if (_next == _ref) {
+    // At the end of the list, we should make _prev point to itself.
+    // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
+    // and _prev will be NULL.
+    new_next = _prev;
+  } else {
+    new_next = _next;
+  }
+
   if (UseCompressedOops) {
     // Remove Reference object from list.
-    oopDesc::encode_store_heap_oop_not_null((narrowOop*)_prev_next, _next);
+    oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
   } else {
     // Remove Reference object from list.
-    oopDesc::store_heap_oop((oop*)_prev_next, _next);
+    oopDesc::store_heap_oop((oop*)_prev_next, new_next);
   }
   NOT_PRODUCT(_removed++);
   _refs_list.dec_length(1);
 }
 
 inline void DiscoveredListIterator::move_to_next() {
-  _ref = _next;
+  if (_ref == _next) {
+    // End of the list.
+    _ref = NULL;
+  } else {
+    _ref = _next;
+  }
   assert(_ref != _first_seen, "cyclic ref_list found");
   NOT_PRODUCT(_processed++);
 }
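
remove() now folds three cases into a single store through _prev_next: unlinking a middle element splices _prev to _next; unlinking the tail makes _prev the new self-looped tail; and unlinking the only element writes NULL into the list head, since _prev is NULL there. A hedged sketch of the same case analysis on the Node type used earlier:

    // Unlink 'ref', given its predecessor ('prev' is nullptr when 'ref'
    // is the first element). Mirrors DiscoveredListIterator::remove().
    void unlink(Node*& head, Node* prev, Node* ref) {
      Node* next = ref->discovered;
      // If ref is the tail (self-loop), its predecessor becomes the new
      // tail; with no predecessor that yields nullptr, an empty list.
      Node* new_next = (next == ref) ? prev : next;
      if (prev == nullptr) {
        head = new_next;              // ref was the head of the list
      } else {
        prev->discovered = new_next;  // self-loop when prev is the new tail
      }
      ref->discovered = nullptr;      // ref is no longer on any list
    }
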
@@ -725,22 +727,28 @@
     assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
     iter.next();
   }
-  // Remember to keep sentinel pointer around
+  // Remember to update the next pointer of the last ref.
   iter.update_discovered();
   // Close the reachable set
   complete_gc->do_void();
 }
 
 void
+ReferenceProcessor::clear_discovered_references(DiscoveredList& refs_list) {
+  oop obj = NULL;
+  oop next = refs_list.head();
+  while (next != obj) {
+    obj = next;
+    next = java_lang_ref_Reference::discovered(obj);
+    java_lang_ref_Reference::set_discovered_raw(obj, NULL);
+  }
+  refs_list.set_head(NULL);
+  refs_list.set_length(0);
+}
+
+void
 ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
-  oop obj = refs_list.head();
-  while (obj != sentinel_ref()) {
-    oop discovered = java_lang_ref_Reference::discovered(obj);
-    java_lang_ref_Reference::set_discovered_raw(obj, NULL);
-    obj = discovered;
-  }
-  refs_list.set_head(sentinel_ref());
-  refs_list.set_length(0);
+  clear_discovered_references(refs_list);
 }
 
 void ReferenceProcessor::abandon_partial_discovery() {
@@ -859,6 +867,9 @@
           refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
                               avg_refs - ref_lists[to_idx].length());
         }
+
+        assert(refs_to_move > 0, "otherwise the code below will fail");
+
         oop move_head = ref_lists[from_idx].head();
         oop move_tail = move_head;
         oop new_head  = move_head;
@@ -867,10 +878,24 @@
           move_tail = new_head;
           new_head = java_lang_ref_Reference::discovered(new_head);
         }
-        java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
+
+        // Add the chain to the to list.
+        if (ref_lists[to_idx].head() == NULL) {
+          // to list is empty. Make a loop at the end.
+          java_lang_ref_Reference::set_discovered(move_tail, move_tail);
+        } else {
+          java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
+        }
         ref_lists[to_idx].set_head(move_head);
         ref_lists[to_idx].inc_length(refs_to_move);
-        ref_lists[from_idx].set_head(new_head);
+
+        // Remove the chain from the from list.
+        if (move_tail == new_head) {
+          // We found the end of the from list.
+          ref_lists[from_idx].set_head(NULL);
+        } else {
+          ref_lists[from_idx].set_head(new_head);
+        }
         ref_lists[from_idx].dec_length(refs_to_move);
         if (ref_lists[from_idx].length() == 0) {
           break;
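
Balancing must now preserve the invariant at both ends of the move: if the destination list is empty, the moved tail is pointed at itself rather than at a NULL head, and if the move drained the source list, detected by move_tail == new_head because stepping past the tail yields the tail again, the source head becomes NULL. A sketch of moving the first k elements between two Node lists under those rules (assuming 0 < k <= length of the source, as the new assert above requires):

    // Move the first k nodes of 'from' onto the front of 'to'.
    void move_front(Node*& from, Node*& to, int k) {
      Node* move_head = from;
      Node* move_tail = move_head;
      Node* new_head  = move_head;
      for (int i = 0; i < k; i++) {
        move_tail = new_head;
        new_head  = move_tail->discovered;  // equals move_tail at the tail
      }
      // Attach the chain; an empty destination gets a self-looped tail.
      move_tail->discovered = (to != nullptr) ? to : move_tail;
      to = move_head;
      // Detach the chain; walking off the end means the source is now empty.
      from = (move_tail == new_head) ? nullptr : new_head;
    }
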
@@ -1082,6 +1107,8 @@
   // First we must make sure this object is only enqueued once. CAS in a non null
   // discovered_addr.
   oop current_head = refs_list.head();
+  // The last ref must have its discovered field pointing to itself.
+  oop next_discovered = (current_head != NULL) ? current_head : obj;
 
   // Note: In the case of G1, this specific pre-barrier is strictly
   // not necessary because the only case we are interested in
@@ -1091,13 +1118,13 @@
   // collector that might have need for a pre-barrier here.
   if (_discovered_list_needs_barrier && !UseG1GC) {
     if (UseCompressedOops) {
-      _bs->write_ref_field_pre((narrowOop*)discovered_addr, current_head);
+      _bs->write_ref_field_pre((narrowOop*)discovered_addr, next_discovered);
    } else {
-      _bs->write_ref_field_pre((oop*)discovered_addr, current_head);
+      _bs->write_ref_field_pre((oop*)discovered_addr, next_discovered);
     }
     guarantee(false, "Need to check non-G1 collector");
   }
-  oop retest = oopDesc::atomic_compare_exchange_oop(current_head, discovered_addr,
+  oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
                                                     NULL);
   if (retest == NULL) {
     // This thread just won the right to enqueue the object.
@@ -1106,7 +1133,7 @@
     refs_list.set_head(obj);
     refs_list.inc_length(1);
     if (_discovered_list_needs_barrier) {
-      _bs->write_ref_field((void*)discovered_addr, current_head);
+      _bs->write_ref_field((void*)discovered_addr, next_discovered);
     }
 
     if (TraceReferenceGC) {
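
In the multi-threaded discovery path the value installed by the CAS is next_discovered rather than the raw head, so that an object discovered onto an empty list is immediately its own successor and the tail invariant holds from the moment the CAS succeeds; the CAS on the object's own discovered field is what guarantees each Reference is discovered exactly once. A simplified sketch with C++11 atomics, assuming, as with the real per-worker queues, that each GC thread is the only writer of its own list head:

    #include <atomic>

    struct MtNode {
      std::atomic<MtNode*> discovered{nullptr};  // nullptr <=> not on any list
    };

    // Try to claim 'obj' for this thread's list; returns false if another
    // thread discovered it first. Shape mirrors add_to_discovered_list_mt().
    bool try_discover(MtNode*& head, MtNode* obj) {
      // An empty list links obj to itself to keep the tail invariant.
      MtNode* next_discovered = (head != nullptr) ? head : obj;
      MtNode* expected = nullptr;
      if (obj->discovered.compare_exchange_strong(expected, next_discovered)) {
        head = obj;  // we won the race; publish obj as the new head
        return true;
      }
      return false;  // the "retest" failed: obj is already on some list
    }
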
@@ -1262,20 +1289,23 @@
     // here: the field will be visited later when processing the discovered
     // references.
     oop current_head = list->head();
+    // The last ref must have its discovered field pointing to itself.
+    oop next_discovered = (current_head != NULL) ? current_head : obj;
+
     // As in the case further above, since we are over-writing a NULL
     // pre-value, we can safely elide the pre-barrier here for the case of G1.
     assert(discovered == NULL, "control point invariant");
     if (_discovered_list_needs_barrier && !UseG1GC) { // safe to elide for G1
       if (UseCompressedOops) {
-        _bs->write_ref_field_pre((narrowOop*)discovered_addr, current_head);
+        _bs->write_ref_field_pre((narrowOop*)discovered_addr, next_discovered);
       } else {
-        _bs->write_ref_field_pre((oop*)discovered_addr, current_head);
+        _bs->write_ref_field_pre((oop*)discovered_addr, next_discovered);
       }
       guarantee(false, "Need to check non-G1 collector");
     }
-    oop_store_raw(discovered_addr, current_head);
+    oop_store_raw(discovered_addr, next_discovered);
     if (_discovered_list_needs_barrier) {
-      _bs->write_ref_field((void*)discovered_addr, current_head);
+      _bs->write_ref_field((void*)discovered_addr, next_discovered);
     }
     list->set_head(obj);
     list->inc_length(1);
@@ -1437,22 +1467,12 @@
 }
 #endif
 
-void ReferenceProcessor::verify() {
-  guarantee(sentinel_ref() != NULL && sentinel_ref()->is_oop(), "Lost _sentinelRef");
-}
-
 #ifndef PRODUCT
 void ReferenceProcessor::clear_discovered_references() {
   guarantee(!_discovering_refs, "Discovering refs?");
   for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
-    oop obj = _discoveredSoftRefs[i].head();
-    while (obj != sentinel_ref()) {
-      oop next = java_lang_ref_Reference::discovered(obj);
-      java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
-      obj = next;
-    }
-    _discoveredSoftRefs[i].set_head(sentinel_ref());
-    _discoveredSoftRefs[i].set_length(0);
+    clear_discovered_references(_discoveredSoftRefs[i]);
   }
 }
+
 #endif // PRODUCT
