6604422: G1: re-use half-promoted regions

author      tonyp
date        Sun, 15 Mar 2009 22:03:38 -0400
changeset   1071:6c4cea9bfa11
parent      1070:fe2441500281
child       1072:25e146966e7c

6604422: G1: re-use half-promoted regions
6728271: G1: Cleanup G1CollectedHeap::get_gc_alloc_regions()
Summary: Allow the last half-full region allocated into during a GC to be retained and reused during the next GC.
Reviewed-by: apetrusenko, jcoomes

src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp
src/share/vm/gc_implementation/g1/g1CollectedHeap.hpp
src/share/vm/gc_implementation/g1/g1CollectorPolicy.cpp
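
The essence of the change, restated as a standalone sketch before the diff: at the end of an evacuation pause the last half-full GC alloc region for the tenured purpose is parked in a retained slot instead of being released outright, and the next pause tries that slot before asking for a fresh region, discarding it only if it ended up in the collection set, is already full, or was emptied by cleanup. The sketch below uses hypothetical simplified types (Region, AllocRegionCache), not the actual G1CollectedHeap interfaces.

// Minimal sketch of the retain/reuse protocol introduced by this change.
// All types and names here are simplified stand-ins, not HotSpot code.

enum GCAllocPurpose { GCAllocForSurvived, GCAllocForTenured, GCAllocPurposeCount };

struct Region {
  char* bottom;
  char* top;
  char* end;
  bool  in_collection_set;
  bool is_full()  const { return top == end; }
  bool is_empty() const { return top == bottom; }
};

struct AllocRegionCache {
  Region* retained[GCAllocPurposeCount] = {};
  bool    retain[GCAllocPurposeCount]   = {};

  AllocRegionCache() {
    // Mirror the patch: only the tenured purpose keeps its half-full region.
    retain[GCAllocForTenured] = true;
  }

  // Start of a pause: prefer the retained region if it is still usable.
  Region* acquire(GCAllocPurpose ap, Region* (*allocate_new)()) {
    Region* r = retained[ap];
    retained[ap] = nullptr;
    if (r != nullptr &&
        (r->in_collection_set || r->is_full() || r->is_empty())) {
      r = nullptr;  // unusable: fall back to a freshly allocated region
    }
    return (r != nullptr) ? r : allocate_new();
  }

  // End of a pause: keep the half-full region for next time, unless the
  // caller is tearing everything down ("totally", e.g. full GC or shrink).
  void release(GCAllocPurpose ap, Region* r, bool totally) {
    if (r != nullptr && retain[ap] && !totally && !r->is_empty()) {
      retained[ap] = r;
    }
  }
};
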
     1.1 --- a/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp	Fri Mar 13 17:06:44 2009 -0700
     1.2 +++ b/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp	Sun Mar 15 22:03:38 2009 -0400
     1.3 @@ -786,6 +786,12 @@
     1.4    }
     1.5  }
     1.6  
     1.7 +void G1CollectedHeap::abandon_gc_alloc_regions() {
     1.8 +  // first, make sure that the GC alloc region list is empty (it should!)
     1.9 +  assert(_gc_alloc_region_list == NULL, "invariant");
    1.10 +  release_gc_alloc_regions(true /* totally */);
    1.11 +}
    1.12 +
    1.13  class PostMCRemSetClearClosure: public HeapRegionClosure {
    1.14    ModRefBarrierSet* _mr_bs;
    1.15  public:
    1.16 @@ -914,6 +920,7 @@
    1.17  
    1.18      // Make sure we'll choose a new allocation region afterwards.
    1.19      abandon_cur_alloc_region();
    1.20 +    abandon_gc_alloc_regions();
    1.21      assert(_cur_alloc_region == NULL, "Invariant.");
    1.22      g1_rem_set()->as_HRInto_G1RemSet()->cleanupHRRS();
    1.23      tear_down_region_lists();
    1.24 @@ -1306,7 +1313,7 @@
    1.25  }
    1.26  
    1.27  void G1CollectedHeap::shrink(size_t shrink_bytes) {
    1.28 -  release_gc_alloc_regions();
    1.29 +  release_gc_alloc_regions(true /* totally */);
    1.30    tear_down_region_lists();  // We will rebuild them in a moment.
    1.31    shrink_helper(shrink_bytes);
    1.32    rebuild_region_lists();
    1.33 @@ -1345,8 +1352,7 @@
    1.34    _gc_time_stamp(0),
    1.35    _surviving_young_words(NULL),
    1.36    _in_cset_fast_test(NULL),
    1.37 -  _in_cset_fast_test_base(NULL)
    1.38 -{
    1.39 +  _in_cset_fast_test_base(NULL) {
    1.40    _g1h = this; // To catch bugs.
    1.41    if (_process_strong_tasks == NULL || !_process_strong_tasks->valid()) {
    1.42      vm_exit_during_initialization("Failed necessary allocation.");
    1.43 @@ -1371,9 +1377,19 @@
    1.44    }
    1.45  
    1.46    for (int ap = 0; ap < GCAllocPurposeCount; ++ap) {
    1.47 -    _gc_alloc_regions[ap]       = NULL;
    1.48 -    _gc_alloc_region_counts[ap] = 0;
    1.49 -  }
    1.50 +    _gc_alloc_regions[ap]          = NULL;
    1.51 +    _gc_alloc_region_counts[ap]    = 0;
    1.52 +    _retained_gc_alloc_regions[ap] = NULL;
    1.53 +    // by default, we do not retain a GC alloc region for each ap;
    1.54 +    // we'll override this, when appropriate, below
    1.55 +    _retain_gc_alloc_region[ap]    = false;
    1.56 +  }
    1.57 +
    1.58 +  // We will try to remember the last half-full tenured region we
    1.59 +  // allocated to at the end of a collection so that we can re-use it
    1.60 +  // during the next collection.
    1.61 +  _retain_gc_alloc_region[GCAllocForTenured]  = true;
    1.62 +
    1.63    guarantee(_task_queues != NULL, "task_queues allocation failure.");
    1.64  }
    1.65  
    1.66 @@ -2644,7 +2660,7 @@
    1.67          popular_region->set_popular_pending(false);
    1.68        }
    1.69  
    1.70 -      release_gc_alloc_regions();
    1.71 +      release_gc_alloc_regions(false /* totally */);
    1.72  
    1.73        cleanup_surviving_young_words();
    1.74  
    1.75 @@ -2735,6 +2751,10 @@
    1.76  
    1.77  void G1CollectedHeap::set_gc_alloc_region(int purpose, HeapRegion* r) {
    1.78    assert(purpose >= 0 && purpose < GCAllocPurposeCount, "invalid purpose");
    1.79 +  // make sure we don't call set_gc_alloc_region() multiple times on
    1.80 +  // the same region
    1.81 +  assert(r == NULL || !r->is_gc_alloc_region(),
    1.82 +         "shouldn't already be a GC alloc region");
    1.83    HeapWord* original_top = NULL;
    1.84    if (r != NULL)
    1.85      original_top = r->top();
    1.86 @@ -2851,23 +2871,55 @@
    1.87  }
    1.88  
    1.89  void G1CollectedHeap::get_gc_alloc_regions() {
    1.90 +  // First, let's check that the GC alloc region list is empty (it should)
    1.91 +  assert(_gc_alloc_region_list == NULL, "invariant");
    1.92 +
    1.93    for (int ap = 0; ap < GCAllocPurposeCount; ++ap) {
    1.94 +    assert(_gc_alloc_regions[ap] == NULL, "invariant");
    1.95 +
    1.96      // Create new GC alloc regions.
    1.97 -    HeapRegion* alloc_region = _gc_alloc_regions[ap];
    1.98 -    // Clear this alloc region, so that in case it turns out to be
    1.99 -    // unacceptable, we end up with no allocation region, rather than a bad
   1.100 -    // one.
   1.101 -    _gc_alloc_regions[ap] = NULL;
   1.102 -    if (alloc_region == NULL || alloc_region->in_collection_set()) {
   1.103 -      // Can't re-use old one.  Allocate a new one.
   1.104 +    HeapRegion* alloc_region = _retained_gc_alloc_regions[ap];
   1.105 +    _retained_gc_alloc_regions[ap] = NULL;
   1.106 +
   1.107 +    if (alloc_region != NULL) {
   1.108 +      assert(_retain_gc_alloc_region[ap], "only way to retain a GC region");
   1.109 +
   1.110 +      // let's make sure that the GC alloc region is not tagged as such
   1.111 +      // outside a GC operation
   1.112 +      assert(!alloc_region->is_gc_alloc_region(), "sanity");
   1.113 +
   1.114 +      if (alloc_region->in_collection_set() ||
   1.115 +          alloc_region->top() == alloc_region->end() ||
   1.116 +          alloc_region->top() == alloc_region->bottom()) {
   1.117 +        // we will discard the current GC alloc region if it's in the
   1.118 +        // collection set (it can happen!), if it's already full (no
   1.119 +        // point in using it), or if it's empty (this means that it
   1.120 +        // was emptied during a cleanup and it should be on the free
   1.121 +        // list now).
   1.122 +
   1.123 +        alloc_region = NULL;
   1.124 +      }
   1.125 +    }
   1.126 +
   1.127 +    if (alloc_region == NULL) {
   1.128 +      // we will get a new GC alloc region
   1.129        alloc_region = newAllocRegionWithExpansion(ap, 0);
   1.130      }
   1.131 +
   1.132      if (alloc_region != NULL) {
   1.133 +      assert(_gc_alloc_regions[ap] == NULL, "pre-condition");
   1.134        set_gc_alloc_region(ap, alloc_region);
   1.135      }
   1.136 +
   1.137 +    assert(_gc_alloc_regions[ap] == NULL ||
   1.138 +           _gc_alloc_regions[ap]->is_gc_alloc_region(),
   1.139 +           "the GC alloc region should be tagged as such");
   1.140 +    assert(_gc_alloc_regions[ap] == NULL ||
   1.141 +           _gc_alloc_regions[ap] == _gc_alloc_region_list,
   1.142 +           "the GC alloc region should be the same as the GC alloc list head");
   1.143    }
   1.144    // Set alternative regions for allocation purposes that have reached
   1.145 -  // thier limit.
   1.146 +  // their limit.
   1.147    for (int ap = 0; ap < GCAllocPurposeCount; ++ap) {
   1.148      GCAllocPurpose alt_purpose = g1_policy()->alternative_purpose(ap);
   1.149      if (_gc_alloc_regions[ap] == NULL && alt_purpose != ap) {
   1.150 @@ -2877,27 +2929,55 @@
   1.151    assert(check_gc_alloc_regions(), "alloc regions messed up");
   1.152  }
   1.153  
   1.154 -void G1CollectedHeap::release_gc_alloc_regions() {
   1.155 +void G1CollectedHeap::release_gc_alloc_regions(bool totally) {
   1.156    // We keep a separate list of all regions that have been alloc regions in
   1.157 -  // the current collection pause.  Forget that now.
   1.158 +  // the current collection pause. Forget that now. This method will
   1.159 +  // untag the GC alloc regions and tear down the GC alloc region
   1.160 +  // list. It's desirable that no regions are tagged as GC alloc
   1.161 +  // outside GCs.
   1.162    forget_alloc_region_list();
   1.163  
   1.164    // The current alloc regions contain objs that have survived
   1.165    // collection. Make them no longer GC alloc regions.
   1.166    for (int ap = 0; ap < GCAllocPurposeCount; ++ap) {
   1.167      HeapRegion* r = _gc_alloc_regions[ap];
   1.168 -    if (r != NULL && r->is_empty()) {
   1.169 -      {
   1.170 +    _retained_gc_alloc_regions[ap] = NULL;
   1.171 +
   1.172 +    if (r != NULL) {
   1.173 +      // we retain nothing on _gc_alloc_regions between GCs
   1.174 +      set_gc_alloc_region(ap, NULL);
   1.175 +      _gc_alloc_region_counts[ap] = 0;
   1.176 +
   1.177 +      if (r->is_empty()) {
   1.178 +        // we didn't actually allocate anything in it; let's just put
   1.179 +        // it on the free list
   1.180          MutexLockerEx x(ZF_mon, Mutex::_no_safepoint_check_flag);
   1.181          r->set_zero_fill_complete();
   1.182          put_free_region_on_list_locked(r);
   1.183 +      } else if (_retain_gc_alloc_region[ap] && !totally) {
   1.184 +        // retain it so that we can use it at the beginning of the next GC
   1.185 +        _retained_gc_alloc_regions[ap] = r;
   1.186        }
   1.187      }
   1.188 -    // set_gc_alloc_region will also NULLify all aliases to the region
   1.189 -    set_gc_alloc_region(ap, NULL);
   1.190 -    _gc_alloc_region_counts[ap] = 0;
   1.191 -  }
   1.192 -}
   1.193 +  }
   1.194 +}
   1.195 +
   1.196 +#ifndef PRODUCT
   1.197 +// Useful for debugging
   1.198 +
   1.199 +void G1CollectedHeap::print_gc_alloc_regions() {
   1.200 +  gclog_or_tty->print_cr("GC alloc regions");
   1.201 +  for (int ap = 0; ap < GCAllocPurposeCount; ++ap) {
   1.202 +    HeapRegion* r = _gc_alloc_regions[ap];
   1.203 +    if (r == NULL) {
   1.204 +      gclog_or_tty->print_cr("  %2d : "PTR_FORMAT, ap, NULL);
   1.205 +    } else {
   1.206 +      gclog_or_tty->print_cr("  %2d : "PTR_FORMAT" "SIZE_FORMAT,
   1.207 +                             ap, r->bottom(), r->used());
   1.208 +    }
   1.209 +  }
   1.210 +}
   1.211 +#endif // PRODUCT
   1.212  
   1.213  void G1CollectedHeap::init_for_evac_failure(OopsInHeapRegionClosure* cl) {
   1.214    _drain_in_progress = false;
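
The reuse test in the new get_gc_alloc_regions() above can be read as a single predicate over the retained region; the restatement below uses a hypothetical helper name and a simplified region view, purely for illustration.

#include <cstddef>

// Hypothetical restatement of the retained-region check: a region kept
// from the previous pause is only reusable if it is not in the collection
// set, not already full, and not empty (an empty retained region was
// freed during cleanup and belongs on the free list instead).
struct RegionView {
  bool   in_collection_set;
  size_t used_bytes;
  size_t capacity_bytes;
};

static bool can_reuse_retained_region(const RegionView& r) {
  const bool full  = (r.used_bytes == r.capacity_bytes);
  const bool empty = (r.used_bytes == 0);
  return !r.in_collection_set && !full && !empty;
}
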
     2.1 --- a/src/share/vm/gc_implementation/g1/g1CollectedHeap.hpp	Fri Mar 13 17:06:44 2009 -0700
     2.2 +++ b/src/share/vm/gc_implementation/g1/g1CollectedHeap.hpp	Sun Mar 15 22:03:38 2009 -0400
     2.3 @@ -172,7 +172,6 @@
     2.4      NumAPIs = HeapRegion::MaxAge
     2.5    };
     2.6  
     2.7 -
     2.8    // The one and only G1CollectedHeap, so static functions can find it.
     2.9    static G1CollectedHeap* _g1h;
    2.10  
    2.11 @@ -217,11 +216,20 @@
    2.12  
    2.13    // Postcondition: cur_alloc_region == NULL.
    2.14    void abandon_cur_alloc_region();
    2.15 +  void abandon_gc_alloc_regions();
    2.16  
    2.17    // The to-space memory regions into which objects are being copied during
    2.18    // a GC.
    2.19    HeapRegion* _gc_alloc_regions[GCAllocPurposeCount];
    2.20    size_t _gc_alloc_region_counts[GCAllocPurposeCount];
    2.21 +  // These are the regions, one per GCAllocPurpose, that are half-full
    2.22 +  // at the end of a collection and that we want to reuse during the
    2.23 +  // next collection.
    2.24 +  HeapRegion* _retained_gc_alloc_regions[GCAllocPurposeCount];
    2.25 +  // This specifies whether we will keep the last half-full region at
    2.26 +  // the end of a collection so that it can be reused during the next
    2.27 +  // collection (this is specified per GCAllocPurpose)
    2.28 +  bool _retain_gc_alloc_region[GCAllocPurposeCount];
    2.29  
    2.30    // A list of the regions that have been set to be alloc regions in the
    2.31    // current collection.
    2.32 @@ -589,8 +597,21 @@
    2.33  
    2.34    // Ensure that the relevant gc_alloc regions are set.
    2.35    void get_gc_alloc_regions();
    2.36 -  // We're done with GC alloc regions; release them, as appropriate.
    2.37 -  void release_gc_alloc_regions();
    2.38 +  // We're done with GC alloc regions. We are going to tear down the
    2.39 +  // gc alloc list and remove the gc alloc tag from all the regions on
    2.40 +  // that list. However, we will also retain the last (i.e., the one
    2.41 +  // that is half-full) GC alloc region, per GCAllocPurpose, for
    2.42 +  // possible reuse during the next collection, provided
    2.43 +  // _retain_gc_alloc_region[] indicates that it should be the
    2.44 +  // case. Said regions are kept in the _retained_gc_alloc_regions[]
    2.45 +  // array. If the parameter totally is set, we will not retain any
    2.46 +  // regions, irrespective of what _retain_gc_alloc_region[]
    2.47 +  // indicates.
    2.48 +  void release_gc_alloc_regions(bool totally);
    2.49 +#ifndef PRODUCT
    2.50 +  // Useful for debugging.
    2.51 +  void print_gc_alloc_regions();
    2.52 +#endif // !PRODUCT
    2.53  
    2.54    // ("Weak") Reference processing support
    2.55    ReferenceProcessor* _ref_processor;
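
A usage sketch tying the new fields and the totally flag together, continuing the simplified AllocRegionCache example near the top of this page (hypothetical driver code): the end of a normal evacuation pause releases with totally == false and the half-full region survives into the next acquire, while a full collection or heap shrink releases with totally == true and nothing is carried over.

#include <cassert>

// Hypothetical driver reusing the AllocRegionCache sketch above.
static Region g_fresh;                          // stand-in for a new region
static Region* allocate_new() { return &g_fresh; }

int main() {
  AllocRegionCache cache;
  Region half_full = { /* bottom */ (char*)0x1000, /* top */ (char*)0x1800,
                       /* end */    (char*)0x2000, /* in_collection_set */ false };

  // End of a normal evacuation pause: totally == false, region is retained.
  cache.release(GCAllocForTenured, &half_full, /* totally */ false);
  assert(cache.acquire(GCAllocForTenured, allocate_new) == &half_full);

  // Full GC or heap shrink: totally == true, nothing is carried over.
  cache.release(GCAllocForTenured, &half_full, /* totally */ true);
  assert(cache.acquire(GCAllocForTenured, allocate_new) == &g_fresh);
  return 0;
}
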
     3.1 --- a/src/share/vm/gc_implementation/g1/g1CollectorPolicy.cpp	Fri Mar 13 17:06:44 2009 -0700
     3.2 +++ b/src/share/vm/gc_implementation/g1/g1CollectorPolicy.cpp	Sun Mar 15 22:03:38 2009 -0400
     3.3 @@ -1087,6 +1087,7 @@
     3.4  
     3.5    assert(_g1->used_regions() == _g1->recalculate_used_regions(),
     3.6           "sanity");
     3.7 +  assert(_g1->used() == _g1->recalculate_used(), "sanity");
     3.8  
     3.9    double s_w_t_ms = (start_time_sec - _stop_world_start) * 1000.0;
    3.10    _all_stop_world_times_ms->add(s_w_t_ms);
