--- a/src/share/vm/memory/allocation.cpp	Wed Aug 27 09:36:55 2014 +0200
+++ b/src/share/vm/memory/allocation.cpp	Wed Aug 27 08:19:12 2014 -0400
@@ -438,24 +438,22 @@
 }
 
 //------------------------------Arena------------------------------------------
-NOT_PRODUCT(volatile jint Arena::_instance_count = 0;)
-
-Arena::Arena(size_t init_size) {
+Arena::Arena(MEMFLAGS flag, size_t init_size) : _flags(flag), _size_in_bytes(0) {
   size_t round_size = (sizeof (char *)) - 1;
   init_size = (init_size+round_size) & ~round_size;
   _first = _chunk = new (AllocFailStrategy::EXIT_OOM, init_size) Chunk(init_size);
   _hwm = _chunk->bottom();	// Save the cached hwm, max
   _max = _chunk->top();
+  MemTracker::record_new_arena(flag);
   set_size_in_bytes(init_size);
-  NOT_PRODUCT(Atomic::inc(&_instance_count);)
 }
 
-Arena::Arena() {
+Arena::Arena(MEMFLAGS flag) : _flags(flag), _size_in_bytes(0) {
   _first = _chunk = new (AllocFailStrategy::EXIT_OOM, Chunk::init_size) Chunk(Chunk::init_size);
   _hwm = _chunk->bottom();	// Save the cached hwm, max
   _max = _chunk->top();
+  MemTracker::record_new_arena(flag);
   set_size_in_bytes(Chunk::init_size);
-  NOT_PRODUCT(Atomic::inc(&_instance_count);)
 }
 
 Arena *Arena::move_contents(Arena *copy) {
@@ -477,7 +475,7 @@
 
 Arena::~Arena() {
   destruct_contents();
-  NOT_PRODUCT(Atomic::dec(&_instance_count);)
+  MemTracker::record_arena_free(_flags);
 }
 
 void* Arena::operator new(size_t size) throw() {
@@ -493,21 +491,21 @@
 // dynamic memory type binding
 void* Arena::operator new(size_t size, MEMFLAGS flags) throw() {
 #ifdef ASSERT
-  void* p = (void*)AllocateHeap(size, flags|otArena, CALLER_PC);
+  void* p = (void*)AllocateHeap(size, flags, CALLER_PC);
   if (PrintMallocFree) trace_heap_malloc(size, "Arena-new", p);
   return p;
 #else
-  return (void *) AllocateHeap(size, flags|otArena, CALLER_PC);
+  return (void *) AllocateHeap(size, flags, CALLER_PC);
 #endif
 }
 
 void* Arena::operator new(size_t size, const std::nothrow_t& nothrow_constant, MEMFLAGS flags) throw() {
 #ifdef ASSERT
-  void* p = os::malloc(size, flags|otArena, CALLER_PC);
+  void* p = os::malloc(size, flags, CALLER_PC);
   if (PrintMallocFree) trace_heap_malloc(size, "Arena-new", p);
   return p;
 #else
-  return os::malloc(size, flags|otArena, CALLER_PC);
+  return os::malloc(size, flags, CALLER_PC);
 #endif
 }
 
@@ -532,8 +530,9 @@
 // change the size
 void Arena::set_size_in_bytes(size_t size) {
   if (_size_in_bytes != size) {
+    long delta = (long)(size - size_in_bytes());
     _size_in_bytes = size;
-    MemTracker::record_arena_size((address)this, size);
+    MemTracker::record_arena_size_change(delta, _flags);
   }
 }
 