src/share/vm/gc_interface/collectedHeap.hpp

changeset 1601:7b0e9cba0307
parent    1600:2dd52dea6d28
child     1822:0bfd3fb24150
--- a/src/share/vm/gc_interface/collectedHeap.hpp	Tue Jan 12 14:56:46 2010 -0800
+++ b/src/share/vm/gc_interface/collectedHeap.hpp	Wed Jan 13 15:26:39 2010 -0800
@@ -51,6 +51,9 @@
   // Used for filler objects (static, but initialized in ctor).
   static size_t _filler_array_max_size;
 
+  // Used in support of ReduceInitialCardMarks; only consulted if COMPILER2 is being used
+  bool _defer_initial_card_mark;
+
  protected:
   MemRegion _reserved;
   BarrierSet* _barrier_set;
@@ -70,13 +73,16 @@
   // Constructor
   CollectedHeap();
 
+  // Do common initializations that must follow instance construction,
+  // for example, those needing virtual calls.
+  // This code could perhaps be moved into initialize() but would
+  // be slightly more awkward because we want the latter to be a
+  // pure virtual.
+  void pre_initialize();
+
   // Create a new tlab
   virtual HeapWord* allocate_new_tlab(size_t size);
 
-  // Fix up tlabs to make the heap well-formed again,
-  // optionally retiring the tlabs.
-  virtual void fill_all_tlabs(bool retire);
-
   // Accumulate statistics on all tlabs.
   virtual void accumulate_statistics_all_tlabs();
 
@@ -431,14 +437,25 @@
   // promises to call this function on such a slow-path-allocated
   // object before performing initializations that have elided
   // store barriers. Returns new_obj, or maybe a safer copy thereof.
-  virtual oop defer_store_barrier(JavaThread* thread, oop new_obj);
+  virtual oop new_store_pre_barrier(JavaThread* thread, oop new_obj);
 
   // Answers whether an initializing store to a new object currently
-  // allocated at the given address doesn't need a (deferred) store
+  // allocated at the given address doesn't need a store
   // barrier. Returns "true" if it doesn't need an initializing
   // store barrier; answers "false" if it does.
   virtual bool can_elide_initializing_store_barrier(oop new_obj) = 0;
 
+  // If a compiler is eliding store barriers for TLAB-allocated objects,
+  // we will be informed of a slow-path allocation by a call
+  // to new_store_pre_barrier() above. Such a call precedes the
+  // initialization of the object itself, and no post-store-barriers will
+  // be issued. Some heap types require that the barrier strictly follows
+  // the initializing stores. (This is currently implemented by deferring the
+  // barrier until the next slow-path allocation or gc-related safepoint.)
+  // This interface answers whether a particular heap type needs the card
+  // mark to be thus strictly sequenced after the stores.
+  virtual bool card_mark_must_follow_store() const = 0;
+
   // If the CollectedHeap was asked to defer a store barrier above,
   // this informs it to flush such a deferred store barrier to the
   // remembered set.
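
Note on the pre_initialize() hook added above: in C++, virtual calls made while a
base-class constructor is running do not reach subclass overrides, so any setup
that depends on virtual dispatch has to run in a second phase after the most-derived
object exists. The standalone sketch below illustrates that two-phase pattern; the
names (Heap, GenerationalHeap, needs_deferred_mark) are invented for illustration
and are not the HotSpot classes.

// Minimal sketch of two-phase initialization: construct first, then run a
// post-construction step that may safely use virtual calls.
#include <cstdio>

class Heap {
 public:
  Heap() {
    // Calling needs_deferred_mark() here would dispatch to Heap's version,
    // not a subclass override, because the subclass part is not built yet.
  }
  virtual ~Heap() = default;

  // Runs after the most-derived object is fully constructed, so virtual
  // calls now resolve to the subclass overrides.
  void pre_initialize() {
    _defer_initial_card_mark = needs_deferred_mark();
  }

  virtual bool needs_deferred_mark() const { return false; }
  bool defer_initial_card_mark() const { return _defer_initial_card_mark; }

 private:
  bool _defer_initial_card_mark = false;
};

class GenerationalHeap : public Heap {
 public:
  bool needs_deferred_mark() const override { return true; }
};

int main() {
  GenerationalHeap heap;
  heap.pre_initialize();  // second phase: virtual dispatch sees the subclass
  std::printf("defer initial card mark: %s\n",
              heap.defer_initial_card_mark() ? "yes" : "no");
}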

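The new_store_pre_barrier() / card_mark_must_follow_store() comments above describe
deferring the initial card mark for a slow-path-allocated object until after its
initializing stores, and flushing it at the next slow-path allocation or GC-related
safepoint. The toy model below sketches that idea only; ToyHeap, the 512-byte card
size, and the helper names are assumptions made for illustration, not the real
barrier code.

// Toy model: record the newly allocated object's card range at slow-path
// allocation time, and dirty the cards later, strictly after the
// initializing stores have been performed.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>

constexpr size_t kCardShift = 9;        // 512-byte cards (assumed for the sketch)
constexpr size_t kHeapWords = 1 << 14;  // toy heap size in words

struct ToyHeap {
  uint8_t cards[(kHeapWords * sizeof(void*)) >> kCardShift] = {};
  // Pending (deferred) card-mark region, in word offsets; empty if begin == end.
  size_t  deferred_begin = 0, deferred_end = 0;

  // Called on the slow allocation path before the object is initialized:
  // flush any previously deferred mark, then remember the new object's
  // card range instead of dirtying it immediately.
  void new_store_pre_barrier(size_t obj_offset_words, size_t size_words) {
    flush_deferred_store_barrier();
    deferred_begin = obj_offset_words;
    deferred_end   = obj_offset_words + size_words;
  }

  // Dirties the cards covering the deferred region; calling this after the
  // object's fields are written keeps the card mark sequenced after the stores.
  void flush_deferred_store_barrier() {
    if (deferred_begin == deferred_end) return;
    size_t first = (deferred_begin * sizeof(void*)) >> kCardShift;
    size_t last  = ((deferred_end * sizeof(void*)) - 1) >> kCardShift;
    std::memset(&cards[first], 1, last - first + 1);
    deferred_begin = deferred_end = 0;
  }
};

int main() {
  ToyHeap heap;
  heap.new_store_pre_barrier(/*obj_offset_words=*/128, /*size_words=*/64);
  // ... initializing stores to the new object would happen here ...
  heap.flush_deferred_store_barrier();  // card marks follow the stores
  size_t card = (128 * sizeof(void*)) >> kCardShift;
  std::printf("card %zu dirty: %d\n", card, heap.cards[card]);
}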