diff -r 4efcd79826f2 -r 8c413497f434 src/share/vm/services/memSnapshot.cpp
--- a/src/share/vm/services/memSnapshot.cpp	Fri Nov 09 11:47:28 2012 -0800
+++ b/src/share/vm/services/memSnapshot.cpp	Fri Nov 09 22:22:53 2012 -0800
@@ -50,7 +50,7 @@
       tty->print_cr(" (tag)");
     }
   } else {
-    if (rec->is_arena_size_record()) {
+    if (rec->is_arena_memory_record()) {
       tty->print_cr(" (arena size)");
     } else if (rec->is_allocation_record()) {
       tty->print_cr(" (malloc)");
@@ -401,21 +401,31 @@
   }
 }
 
-void MemSnapshot::copy_pointer(MemPointerRecord* dest, const MemPointerRecord* src) {
+
+void MemSnapshot::copy_seq_pointer(MemPointerRecord* dest, const MemPointerRecord* src) {
   assert(dest != NULL && src != NULL, "Just check");
   assert(dest->addr() == src->addr(), "Just check");
+  assert(dest->seq() > 0 && src->seq() > 0, "not sequenced");
 
-  MEMFLAGS flags = dest->flags();
+  if (MemTracker::track_callsite()) {
+    *(SeqMemPointerRecordEx*)dest = *(SeqMemPointerRecordEx*)src;
+  } else {
+    *(SeqMemPointerRecord*)dest = *(SeqMemPointerRecord*)src;
+  }
+}
+
+void MemSnapshot::assign_pointer(MemPointerRecord* dest, const MemPointerRecord* src) {
+  assert(src != NULL && dest != NULL, "Just check");
+  assert(dest->seq() == 0 && src->seq() > 0, "cast away sequence");
 
   if (MemTracker::track_callsite()) {
     *(MemPointerRecordEx*)dest = *(MemPointerRecordEx*)src;
   } else {
-    *dest = *src;
+    *(MemPointerRecord*)dest = *(MemPointerRecord*)src;
   }
 }
 
-
-// merge a per-thread memory recorder to the staging area
+// merge a recorder to the staging area
 bool MemSnapshot::merge(MemRecorder* rec) {
   assert(rec != NULL && !rec->out_of_memory(), "Just check");
 
@@ -423,71 +433,45 @@
 
   MutexLockerEx lock(_lock, true);
   MemPointerIterator malloc_staging_itr(_staging_area.malloc_data());
-  MemPointerRecord *p1, *p2;
-  p1 = (MemPointerRecord*) itr.current();
-  while (p1 != NULL) {
-    if (p1->is_vm_pointer()) {
+  MemPointerRecord* incoming_rec = (MemPointerRecord*) itr.current();
+  MemPointerRecord* matched_rec;
+
+  while (incoming_rec != NULL) {
+    if (incoming_rec->is_vm_pointer()) {
       // we don't do anything with virtual memory records during merge
-      if (!_staging_area.vm_data()->append(p1)) {
+      if (!_staging_area.vm_data()->append(incoming_rec)) {
         return false;
       }
     } else {
      // locate matched record and/or also position the iterator to proper
      // location for this incoming record.
-      p2 = (MemPointerRecord*)malloc_staging_itr.locate(p1->addr());
-      // we have not seen this memory block, so just add to staging area
-      if (p2 == NULL) {
-        if (!malloc_staging_itr.insert(p1)) {
+      matched_rec = (MemPointerRecord*)malloc_staging_itr.locate(incoming_rec->addr());
+      // we have not seen this memory block in this generation,
+      // so just add to staging area
+      if (matched_rec == NULL) {
+        if (!malloc_staging_itr.insert(incoming_rec)) {
          return false;
        }
-      } else if (p1->addr() == p2->addr()) {
-        MemPointerRecord* staging_next = (MemPointerRecord*)malloc_staging_itr.peek_next();
-        // a memory block can have many tagging records, find right one to replace or
-        // right position to insert
-        while (staging_next != NULL && staging_next->addr() == p1->addr()) {
-          if ((staging_next->flags() & MemPointerRecord::tag_masks) <=
-            (p1->flags() & MemPointerRecord::tag_masks)) {
-            p2 = (MemPointerRecord*)malloc_staging_itr.next();
-            staging_next = (MemPointerRecord*)malloc_staging_itr.peek_next();
-          } else {
-            break;
-          }
+      } else if (incoming_rec->addr() == matched_rec->addr()) {
+        // whoever has higher sequence number wins
+        if (incoming_rec->seq() > matched_rec->seq()) {
+          copy_seq_pointer(matched_rec, incoming_rec);
         }
-        int df = (p1->flags() & MemPointerRecord::tag_masks) -
-          (p2->flags() & MemPointerRecord::tag_masks);
-        if (df == 0) {
-          assert(p1->seq() > 0, "not sequenced");
-          assert(p2->seq() > 0, "not sequenced");
-          if (p1->seq() > p2->seq()) {
-            copy_pointer(p2, p1);
-          }
-        } else if (df < 0) {
-          if (!malloc_staging_itr.insert(p1)) {
-            return false;
-          }
-        } else {
-          if (!malloc_staging_itr.insert_after(p1)) {
-            return false;
-          }
-        }
-      } else if (p1->addr() < p2->addr()) {
-        if (!malloc_staging_itr.insert(p1)) {
+      } else if (incoming_rec->addr() < matched_rec->addr()) {
+        if (!malloc_staging_itr.insert(incoming_rec)) {
           return false;
         }
       } else {
-        if (!malloc_staging_itr.insert_after(p1)) {
-          return false;
-        }
+        ShouldNotReachHere();
       }
     }
-    p1 = (MemPointerRecord*)itr.next();
+    incoming_rec = (MemPointerRecord*)itr.next();
   }
 
   NOT_PRODUCT(void check_staging_data();)
   return true;
 }
 
-
 // promote data to next generation
 bool MemSnapshot::promote() {
   assert(_alloc_ptrs != NULL && _vm_ptrs != NULL, "Just check");
@@ -518,20 +502,25 @@
     // found matched memory block
     if (matched_rec != NULL && new_rec->addr() == matched_rec->addr()) {
       // snapshot already contains 'live' records
-      assert(matched_rec->is_allocation_record() || matched_rec->is_arena_size_record(),
+      assert(matched_rec->is_allocation_record() || matched_rec->is_arena_memory_record(),
              "Sanity check");
       // update block states
-      if (new_rec->is_allocation_record() || new_rec->is_arena_size_record()) {
-        copy_pointer(matched_rec, new_rec);
+      if (new_rec->is_allocation_record()) {
+        assign_pointer(matched_rec, new_rec);
+      } else if (new_rec->is_arena_memory_record()) {
+        if (new_rec->size() == 0) {
+          // remove size record once size drops to 0
+          malloc_snapshot_itr.remove();
+        } else {
+          assign_pointer(matched_rec, new_rec);
+        }
       } else {
        // a deallocation record
        assert(new_rec->is_deallocation_record(), "Sanity check");
        // an arena record can be followed by a size record, we need to remove both
        if (matched_rec->is_arena_record()) {
          MemPointerRecord* next = (MemPointerRecord*)malloc_snapshot_itr.peek_next();
-          if (next->is_arena_size_record()) {
-            // it has to match the arena record
-            assert(next->is_size_record_of_arena(matched_rec), "Sanity check");
+          if (next->is_arena_memory_record() && next->is_memory_record_of_arena(matched_rec)) {
             malloc_snapshot_itr.remove();
           }
         }
@@ -539,17 +528,13 @@
        malloc_snapshot_itr.remove();
      }
    } else {
-      // it is a new record, insert into snapshot
-      if (new_rec->is_arena_size_record()) {
-        MemPointerRecord* prev = (MemPointerRecord*)malloc_snapshot_itr.peek_prev();
-        if (prev == NULL || !prev->is_arena_record() || !new_rec->is_size_record_of_arena(prev)) {
-          // no matched arena record, ignore the size record
-          new_rec = NULL;
-        }
+      // don't insert size 0 record
+      if (new_rec->is_arena_memory_record() && new_rec->size() == 0) {
+        new_rec = NULL;
      }
-      // only 'live' record can go into snapshot
+
      if (new_rec != NULL) {
-        if (new_rec->is_allocation_record() || new_rec->is_arena_size_record()) {
+        if (new_rec->is_allocation_record() || new_rec->is_arena_memory_record()) {
          if (matched_rec != NULL && new_rec->addr() > matched_rec->addr()) {
            if (!malloc_snapshot_itr.insert_after(new_rec)) {
              return false;
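
The hunks in MemSnapshot::merge() above replace the old flag-ordering logic with a single rule: when an incoming malloc record matches a staged record at the same address, whichever record carries the higher sequence number wins. The standalone sketch below (not part of the changeset; Record and the std::map staging area are hypothetical stand-ins for HotSpot's MemPointerRecord and MemPointerArray) illustrates that merge rule in isolation.

// Sketch only: sequence-number-based record merging, assuming a simplified
// Record type instead of HotSpot's MemPointerRecord hierarchy.
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <map>

struct Record {
  uintptr_t addr;   // address of the tracked memory block
  uint32_t  seq;    // global sequence number of the tracking event
  size_t    size;   // block size reported by the event
};

// Keep at most one record per address; a newer event (higher seq) replaces
// an older one, mirroring the copy_seq_pointer() call in the merge loop.
void merge_record(std::map<uintptr_t, Record>& staging, const Record& incoming) {
  auto it = staging.find(incoming.addr);
  if (it == staging.end()) {
    staging[incoming.addr] = incoming;    // first record seen for this block
  } else if (incoming.seq > it->second.seq) {
    it->second = incoming;                // newer event wins
  }                                       // otherwise the stale event is dropped
}

int main() {
  std::map<uintptr_t, Record> staging;
  merge_record(staging, {0x1000, 5, 64});
  merge_record(staging, {0x1000, 9, 128});   // later event for the same block
  merge_record(staging, {0x1000, 7, 32});    // stale event, ignored
  std::cout << staging[0x1000].size << "\n"; // prints 128
  return 0;
}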