src/share/vm/memory/referenceProcessor.cpp

changeset 2188
8b10f48633dc
parent 1907
c18cbe5936b8
child 2314
f95d63e2154a
     1.1 --- a/src/share/vm/memory/referenceProcessor.cpp	Thu Sep 16 13:45:55 2010 -0700
     1.2 +++ b/src/share/vm/memory/referenceProcessor.cpp	Mon Sep 20 14:38:38 2010 -0700
     1.3 @@ -1,5 +1,5 @@
     1.4  /*
     1.5 - * Copyright (c) 2001, 2009, Oracle and/or its affiliates. All rights reserved.
     1.6 + * Copyright (c) 2001, 2010, Oracle and/or its affiliates. All rights reserved.
     1.7   * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     1.8   *
     1.9   * This code is free software; you can redistribute it and/or modify it
    1.10 @@ -137,16 +137,17 @@
    1.11    _discovery_is_atomic = atomic_discovery;
    1.12    _discovery_is_mt     = mt_discovery;
    1.13    _num_q               = mt_degree;
    1.14 -  _discoveredSoftRefs  = NEW_C_HEAP_ARRAY(DiscoveredList, _num_q * subclasses_of_ref);
    1.15 +  _max_num_q           = mt_degree;
    1.16 +  _discoveredSoftRefs  = NEW_C_HEAP_ARRAY(DiscoveredList, _max_num_q * subclasses_of_ref);
    1.17    if (_discoveredSoftRefs == NULL) {
    1.18      vm_exit_during_initialization("Could not allocated RefProc Array");
    1.19    }
    1.20 -  _discoveredWeakRefs    = &_discoveredSoftRefs[_num_q];
    1.21 -  _discoveredFinalRefs   = &_discoveredWeakRefs[_num_q];
    1.22 -  _discoveredPhantomRefs = &_discoveredFinalRefs[_num_q];
    1.23 +  _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
    1.24 +  _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
    1.25 +  _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
    1.26    assert(sentinel_ref() != NULL, "_sentinelRef is NULL");
    1.27    // Initialized all entries to _sentinelRef
    1.28 -  for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
    1.29 +  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
    1.30          _discoveredSoftRefs[i].set_head(sentinel_ref());
    1.31      _discoveredSoftRefs[i].set_length(0);
    1.32    }
    1.33 @@ -159,7 +160,7 @@
    1.34  #ifndef PRODUCT
    1.35  void ReferenceProcessor::verify_no_references_recorded() {
    1.36    guarantee(!_discovering_refs, "Discovering refs?");
    1.37 -  for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
    1.38 +  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
    1.39      guarantee(_discoveredSoftRefs[i].empty(),
    1.40                "Found non-empty discovered list");
    1.41    }
    1.42 @@ -167,7 +168,11 @@
    1.43  #endif
    1.44  
    1.45  void ReferenceProcessor::weak_oops_do(OopClosure* f) {
    1.46 -  for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
    1.47 +  // Should this instead be
     1.48 +  // for (int i = 0; i < subclasses_of_ref; i++) {
    1.49 +  //   for (int j = 0; j < _num_q; j++) {
    1.50 +  //     int index = i * _max_num_q + j;
    1.51 +  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
    1.52      if (UseCompressedOops) {
    1.53        f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
    1.54      } else {
    1.55 @@ -395,7 +400,15 @@
    1.56      assert(work_id < (unsigned int)_ref_processor.num_q(), "Index out-of-bounds");
    1.57      // Simplest first cut: static partitioning.
    1.58      int index = work_id;
    1.59 -    for (int j = 0; j < subclasses_of_ref; j++, index += _n_queues) {
    1.60 +    // The increment on "index" must correspond to the maximum number of queues
    1.61 +    // (n_queues) with which that ReferenceProcessor was created.  That
    1.62 +    // is because of the "clever" way the discovered references lists were
    1.63 +    // allocated and are indexed into.  That number is ParallelGCThreads
    1.64 +    // currently.  Assert that.
    1.65 +    assert(_n_queues == (int) ParallelGCThreads, "Different number not expected");
    1.66 +    for (int j = 0;
    1.67 +         j < subclasses_of_ref;
    1.68 +         j++, index += _n_queues) {
    1.69        _ref_processor.enqueue_discovered_reflist(
    1.70          _refs_lists[index], _pending_list_addr);
    1.71        _refs_lists[index].set_head(_sentinel_ref);
    1.72 @@ -410,11 +423,11 @@
    1.73    if (_processing_is_mt && task_executor != NULL) {
    1.74      // Parallel code
    1.75      RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
    1.76 -                           pending_list_addr, sentinel_ref(), _num_q);
    1.77 +                           pending_list_addr, sentinel_ref(), _max_num_q);
    1.78      task_executor->execute(tsk);
    1.79    } else {
    1.80      // Serial code: call the parent class's implementation
    1.81 -    for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
    1.82 +    for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
    1.83        enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
    1.84        _discoveredSoftRefs[i].set_head(sentinel_ref());
    1.85        _discoveredSoftRefs[i].set_length(0);
    1.86 @@ -614,8 +627,9 @@
    1.87    complete_gc->do_void();
    1.88    NOT_PRODUCT(
    1.89      if (PrintGCDetails && TraceReferenceGC) {
    1.90 -      gclog_or_tty->print(" Dropped %d dead Refs out of %d "
    1.91 -        "discovered Refs by policy ", iter.removed(), iter.processed());
    1.92 +      gclog_or_tty->print_cr(" Dropped %d dead Refs out of %d "
    1.93 +        "discovered Refs by policy  list " INTPTR_FORMAT,
    1.94 +        iter.removed(), iter.processed(), (address)refs_list.head());
    1.95      }
    1.96    )
    1.97  }
    1.98 @@ -651,8 +665,9 @@
    1.99    }
   1.100    NOT_PRODUCT(
   1.101      if (PrintGCDetails && TraceReferenceGC) {
   1.102 -      gclog_or_tty->print(" Dropped %d active Refs out of %d "
   1.103 -        "Refs in discovered list ", iter.removed(), iter.processed());
   1.104 +      gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
   1.105 +        "Refs in discovered list " INTPTR_FORMAT,
   1.106 +        iter.removed(), iter.processed(), (address)refs_list.head());
   1.107      }
   1.108    )
   1.109  }
   1.110 @@ -689,8 +704,9 @@
   1.111    complete_gc->do_void();
   1.112    NOT_PRODUCT(
   1.113      if (PrintGCDetails && TraceReferenceGC) {
   1.114 -      gclog_or_tty->print(" Dropped %d active Refs out of %d "
   1.115 -        "Refs in discovered list ", iter.removed(), iter.processed());
   1.116 +      gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
   1.117 +        "Refs in discovered list " INTPTR_FORMAT,
   1.118 +        iter.removed(), iter.processed(), (address)refs_list.head());
   1.119      }
   1.120    )
   1.121  }
   1.122 @@ -704,6 +720,7 @@
   1.123                                     BoolObjectClosure* is_alive,
   1.124                                     OopClosure*        keep_alive,
   1.125                                     VoidClosure*       complete_gc) {
   1.126 +  ResourceMark rm;
   1.127    DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
   1.128    while (iter.has_next()) {
   1.129      iter.update_discovered();
   1.130 @@ -743,8 +760,8 @@
   1.131  
   1.132  void ReferenceProcessor::abandon_partial_discovery() {
   1.133    // loop over the lists
   1.134 -  for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
   1.135 -    if (TraceReferenceGC && PrintGCDetails && ((i % _num_q) == 0)) {
   1.136 +  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
   1.137 +    if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
   1.138        gclog_or_tty->print_cr(
   1.139          "\nAbandoning %s discovered list",
   1.140          list_name(i));
   1.141 @@ -766,7 +783,9 @@
   1.142                      OopClosure& keep_alive,
   1.143                      VoidClosure& complete_gc)
   1.144    {
   1.145 -    _ref_processor.process_phase1(_refs_lists[i], _policy,
   1.146 +    Thread* thr = Thread::current();
   1.147 +    int refs_list_index = ((WorkerThread*)thr)->id();
   1.148 +    _ref_processor.process_phase1(_refs_lists[refs_list_index], _policy,
   1.149                                    &is_alive, &keep_alive, &complete_gc);
   1.150    }
   1.151  private:
   1.152 @@ -802,6 +821,11 @@
   1.153                      OopClosure& keep_alive,
   1.154                      VoidClosure& complete_gc)
   1.155    {
   1.156 +    // Don't use "refs_list_index" calculated in this way because
   1.157 +    // balance_queues() has moved the Ref's into the first n queues.
   1.158 +    // Thread* thr = Thread::current();
   1.159 +    // int refs_list_index = ((WorkerThread*)thr)->id();
   1.160 +    // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
   1.161      _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
   1.162                                    &is_alive, &keep_alive, &complete_gc);
   1.163    }
   1.164 @@ -810,23 +834,47 @@
   1.165  };
   1.166  
   1.167  // Balances reference queues.
   1.168 +// Move entries from all queues[0, 1, ..., _max_num_q-1] to
   1.169 +// queues[0, 1, ..., _num_q-1] because only the first _num_q
   1.170 +// corresponding to the active workers will be processed.
   1.171  void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
   1.172  {
   1.173    // calculate total length
   1.174    size_t total_refs = 0;
   1.175 -  for (int i = 0; i < _num_q; ++i) {
   1.176 +  if (TraceReferenceGC && PrintGCDetails) {
   1.177 +    gclog_or_tty->print_cr("\nBalance ref_lists ");
   1.178 +  }
   1.179 +
   1.180 +  for (int i = 0; i < _max_num_q; ++i) {
   1.181      total_refs += ref_lists[i].length();
   1.182 +    if (TraceReferenceGC && PrintGCDetails) {
   1.183 +      gclog_or_tty->print("%d ", ref_lists[i].length());
   1.184 +    }
   1.185 +  }
   1.186 +  if (TraceReferenceGC && PrintGCDetails) {
   1.187 +    gclog_or_tty->print_cr(" = %d", total_refs);
   1.188    }
   1.189    size_t avg_refs = total_refs / _num_q + 1;
   1.190    int to_idx = 0;
   1.191 -  for (int from_idx = 0; from_idx < _num_q; from_idx++) {
   1.192 -    while (ref_lists[from_idx].length() > avg_refs) {
   1.193 +  for (int from_idx = 0; from_idx < _max_num_q; from_idx++) {
   1.194 +    bool move_all = false;
   1.195 +    if (from_idx >= _num_q) {
   1.196 +      move_all = ref_lists[from_idx].length() > 0;
   1.197 +    }
   1.198 +    while ((ref_lists[from_idx].length() > avg_refs) ||
   1.199 +           move_all) {
   1.200        assert(to_idx < _num_q, "Sanity Check!");
   1.201        if (ref_lists[to_idx].length() < avg_refs) {
   1.202          // move superfluous refs
   1.203 -        size_t refs_to_move =
   1.204 -          MIN2(ref_lists[from_idx].length() - avg_refs,
   1.205 -               avg_refs - ref_lists[to_idx].length());
   1.206 +        size_t refs_to_move;
   1.207 +        // Move all the Ref's if the from queue will not be processed.
   1.208 +        if (move_all) {
   1.209 +          refs_to_move = MIN2(ref_lists[from_idx].length(),
   1.210 +                              avg_refs - ref_lists[to_idx].length());
   1.211 +        } else {
   1.212 +          refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
   1.213 +                              avg_refs - ref_lists[to_idx].length());
   1.214 +        }
   1.215          oop move_head = ref_lists[from_idx].head();
   1.216          oop move_tail = move_head;
   1.217          oop new_head  = move_head;
   1.218 @@ -840,11 +888,35 @@
   1.219          ref_lists[to_idx].inc_length(refs_to_move);
   1.220          ref_lists[from_idx].set_head(new_head);
   1.221          ref_lists[from_idx].dec_length(refs_to_move);
   1.222 +        if (ref_lists[from_idx].length() == 0) {
   1.223 +          break;
   1.224 +        }
   1.225        } else {
   1.226 -        ++to_idx;
   1.227 +        to_idx = (to_idx + 1) % _num_q;
   1.228        }
   1.229      }
   1.230    }
   1.231 +#ifdef ASSERT
   1.232 +  size_t balanced_total_refs = 0;
   1.233 +  for (int i = 0; i < _max_num_q; ++i) {
   1.234 +    balanced_total_refs += ref_lists[i].length();
   1.235 +    if (TraceReferenceGC && PrintGCDetails) {
   1.236 +      gclog_or_tty->print("%d ", ref_lists[i].length());
   1.237 +    }
   1.238 +  }
   1.239 +  if (TraceReferenceGC && PrintGCDetails) {
   1.240 +    gclog_or_tty->print_cr(" = %d", balanced_total_refs);
   1.241 +    gclog_or_tty->flush();
   1.242 +  }
   1.243 +  assert(total_refs == balanced_total_refs, "Balancing was incomplete");
   1.244 +#endif
   1.245 +}
   1.246 +
   1.247 +void ReferenceProcessor::balance_all_queues() {
   1.248 +  balance_queues(_discoveredSoftRefs);
   1.249 +  balance_queues(_discoveredWeakRefs);
   1.250 +  balance_queues(_discoveredFinalRefs);
   1.251 +  balance_queues(_discoveredPhantomRefs);
   1.252  }
   1.253  
   1.254  void
   1.255 @@ -857,8 +929,17 @@
   1.256    VoidClosure*                 complete_gc,
   1.257    AbstractRefProcTaskExecutor* task_executor)
   1.258  {
   1.259 -  bool mt = task_executor != NULL && _processing_is_mt;
   1.260 -  if (mt && ParallelRefProcBalancingEnabled) {
   1.261 +  bool mt_processing = task_executor != NULL && _processing_is_mt;
   1.262 +  // If discovery used MT and a dynamic number of GC threads, then
   1.263 +  // the queues must be balanced for correctness if fewer than the
    1.264 +  // maximum number of queues were used.  The number of queues used
    1.265 +  // during discovery may be different from the number to be used
    1.266 +  // for processing, so don't depend on _num_q < _max_num_q as part
   1.267 +  // of the test.
   1.268 +  bool must_balance = _discovery_is_mt;
   1.269 +
   1.270 +  if ((mt_processing && ParallelRefProcBalancingEnabled) ||
   1.271 +      must_balance) {
   1.272      balance_queues(refs_lists);
   1.273    }
   1.274    if (PrintReferenceGC && PrintGCDetails) {
   1.275 @@ -875,7 +956,7 @@
   1.276    //   policy reasons. Keep alive the transitive closure of all
   1.277    //   such referents.
   1.278    if (policy != NULL) {
   1.279 -    if (mt) {
   1.280 +    if (mt_processing) {
   1.281        RefProcPhase1Task phase1(*this, refs_lists, policy, true /*marks_oops_alive*/);
   1.282        task_executor->execute(phase1);
   1.283      } else {
   1.284 @@ -891,7 +972,7 @@
   1.285  
   1.286    // Phase 2:
   1.287    // . Traverse the list and remove any refs whose referents are alive.
   1.288 -  if (mt) {
   1.289 +  if (mt_processing) {
   1.290      RefProcPhase2Task phase2(*this, refs_lists, !discovery_is_atomic() /*marks_oops_alive*/);
   1.291      task_executor->execute(phase2);
   1.292    } else {
   1.293 @@ -902,7 +983,7 @@
   1.294  
   1.295    // Phase 3:
   1.296    // . Traverse the list and process referents as appropriate.
   1.297 -  if (mt) {
   1.298 +  if (mt_processing) {
   1.299      RefProcPhase3Task phase3(*this, refs_lists, clear_referent, true /*marks_oops_alive*/);
   1.300      task_executor->execute(phase3);
   1.301    } else {
   1.302 @@ -915,7 +996,11 @@
   1.303  
   1.304  void ReferenceProcessor::clean_up_discovered_references() {
   1.305    // loop over the lists
   1.306 -  for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
   1.307 +  // Should this instead be
    1.308 +  // for (int i = 0; i < subclasses_of_ref; i++) {
   1.309 +  //   for (int j = 0; j < _num_q; j++) {
   1.310 +  //     int index = i * _max_num_q + j;
   1.311 +  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
   1.312      if (TraceReferenceGC && PrintGCDetails && ((i % _num_q) == 0)) {
   1.313        gclog_or_tty->print_cr(
   1.314          "\nScrubbing %s discovered list of Null referents",
   1.315 @@ -976,7 +1061,7 @@
   1.316        id = next_id();
   1.317      }
   1.318    }
   1.319 -  assert(0 <= id && id < _num_q, "Id is out-of-bounds (call Freud?)");
   1.320 +  assert(0 <= id && id < _max_num_q, "Id is out-of-bounds (call Freud?)");
   1.321  
   1.322    // Get the discovered queue to which we will add
   1.323    DiscoveredList* list = NULL;
   1.324 @@ -1001,6 +1086,10 @@
   1.325      default:
   1.326        ShouldNotReachHere();
   1.327    }
   1.328 +  if (TraceReferenceGC && PrintGCDetails) {
   1.329 +    gclog_or_tty->print_cr("Thread %d gets list " INTPTR_FORMAT,
   1.330 +      id, list);
   1.331 +  }
   1.332    return list;
   1.333  }
   1.334  
   1.335 @@ -1243,7 +1332,7 @@
   1.336    {
   1.337      TraceTime tt("Preclean SoftReferences", PrintGCDetails && PrintReferenceGC,
   1.338                false, gclog_or_tty);
   1.339 -    for (int i = 0; i < _num_q; i++) {
   1.340 +    for (int i = 0; i < _max_num_q; i++) {
   1.341        if (yield->should_return()) {
   1.342          return;
   1.343        }
   1.344 @@ -1340,15 +1429,16 @@
   1.345  
   1.346    NOT_PRODUCT(
   1.347      if (PrintGCDetails && PrintReferenceGC) {
   1.348 -      gclog_or_tty->print(" Dropped %d Refs out of %d "
   1.349 -        "Refs in discovered list ", iter.removed(), iter.processed());
   1.350 +      gclog_or_tty->print_cr(" Dropped %d Refs out of %d "
   1.351 +        "Refs in discovered list " INTPTR_FORMAT,
   1.352 +        iter.removed(), iter.processed(), (address)refs_list.head());
   1.353      }
   1.354    )
   1.355  }
   1.356  
   1.357  const char* ReferenceProcessor::list_name(int i) {
   1.358 -   assert(i >= 0 && i <= _num_q * subclasses_of_ref, "Out of bounds index");
   1.359 -   int j = i / _num_q;
   1.360 +   assert(i >= 0 && i <= _max_num_q * subclasses_of_ref, "Out of bounds index");
   1.361 +   int j = i / _max_num_q;
   1.362     switch (j) {
   1.363       case 0: return "SoftRef";
   1.364       case 1: return "WeakRef";
   1.365 @@ -1372,7 +1462,7 @@
   1.366  #ifndef PRODUCT
   1.367  void ReferenceProcessor::clear_discovered_references() {
   1.368    guarantee(!_discovering_refs, "Discovering refs?");
   1.369 -  for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
   1.370 +  for (int i = 0; i < _max_num_q * subclasses_of_ref; i++) {
   1.371      oop obj = _discoveredSoftRefs[i].head();
   1.372      while (obj != sentinel_ref()) {
   1.373        oop next = java_lang_ref_Reference::discovered(obj);

mercurial