src/share/vm/gc_implementation/shared/mutableSpace.cpp

Sat, 01 Sep 2012 13:25:18 -0400

author
coleenp
date
Sat, 01 Sep 2012 13:25:18 -0400
changeset 4037
da91efe96a93
parent 3711
b632e80fc9dc
child 4542
db9981fd3124
permissions
-rw-r--r--

6964458: Reimplement class meta-data storage to use native memory
Summary: Remove PermGen, allocate meta-data in metaspace linked to class loaders, rewrite GC walking, rewrite and rename metadata to be C++ classes
Reviewed-by: jmasa, stefank, never, coleenp, kvn, brutisso, mgerdin, dholmes, jrose, twisti, roland
Contributed-by: jmasa <jon.masamitsu@oracle.com>, stefank <stefan.karlsson@oracle.com>, mgerdin <mikael.gerdin@oracle.com>, never <tom.rodriguez@oracle.com>

duke@435 1 /*
brutisso@3711 2 * Copyright (c) 2001, 2012, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
stefank@2314 26 #ifndef SERIALGC
stefank@2314 27 #include "gc_implementation/shared/mutableSpace.hpp"
stefank@2314 28 #include "gc_implementation/shared/spaceDecorator.hpp"
stefank@2314 29 #include "oops/oop.inline.hpp"
stefank@2314 30 #include "runtime/safepoint.hpp"
stefank@2314 31 #include "runtime/thread.hpp"
stefank@2314 32 #endif
duke@435 33
iveresov@970 34 MutableSpace::MutableSpace(size_t alignment): ImmutableSpace(), _top(NULL), _alignment(alignment) {
iveresov@970 35 assert(MutableSpace::alignment() >= 0 &&
iveresov@970 36 MutableSpace::alignment() % os::vm_page_size() == 0,
iveresov@970 37 "Space should be aligned");
jmasa@698 38 _mangler = new MutableSpaceMangler(this);
jmasa@698 39 }
jmasa@698 40
// Release the mangler helper allocated in the constructor; the space does
// not own its underlying memory region, only this bookkeeping object.
MutableSpace::~MutableSpace() {
  delete _mangler;
}
jmasa@698 44
iveresov@970 45 void MutableSpace::numa_setup_pages(MemRegion mr, bool clear_space) {
iveresov@970 46 if (!mr.is_empty()) {
iveresov@970 47 size_t page_size = UseLargePages ? alignment() : os::vm_page_size();
iveresov@970 48 HeapWord *start = (HeapWord*)round_to((intptr_t) mr.start(), page_size);
iveresov@970 49 HeapWord *end = (HeapWord*)round_down((intptr_t) mr.end(), page_size);
iveresov@970 50 if (end > start) {
iveresov@970 51 size_t size = pointer_delta(end, start, sizeof(char));
iveresov@970 52 if (clear_space) {
iveresov@970 53 // Prefer page reallocation to migration.
iveresov@3363 54 os::free_memory((char*)start, size, page_size);
iveresov@970 55 }
iveresov@970 56 os::numa_make_global((char*)start, size);
iveresov@970 57 }
iveresov@970 58 }
iveresov@970 59 }
iveresov@970 60
iveresov@970 61 void MutableSpace::pretouch_pages(MemRegion mr) {
iveresov@970 62 for (volatile char *p = (char*)mr.start(); p < (char*)mr.end(); p += os::vm_page_size()) {
iveresov@970 63 char t = *p; *p = t;
iveresov@970 64 }
iveresov@970 65 }
iveresov@970 66
// Establish this space's boundaries as [mr.start(), mr.end()), optionally
// performing NUMA page placement and/or pretouching on the parts of the
// region that are new since the last call, then optionally clearing the
// space. Both boundaries must fall on page boundaries.
void MutableSpace::initialize(MemRegion mr,
                              bool clear_space,
                              bool mangle_space,
                              bool setup_pages) {

  assert(Universe::on_page_boundary(mr.start()) && Universe::on_page_boundary(mr.end()),
         "invalid space boundaries");

  if (setup_pages && (UseNUMA || AlwaysPreTouch)) {
    // The space may move left and right or expand/shrink.
    // We'd like to enforce the desired page placement.
    // 'head' and 'tail' are the sub-regions that were NOT covered the last
    // time page setup ran, and so still need placement/pretouch work.
    MemRegion head, tail;
    if (last_setup_region().is_empty()) {
      // If it's the first initialization don't limit the amount of work.
      head = mr;
      tail = MemRegion(mr.end(), mr.end());  // empty tail
    } else {
      // Is there an intersection with the address space?
      MemRegion intersection = last_setup_region().intersection(mr);
      if (intersection.is_empty()) {
        // No overlap with previously set-up pages: treat everything as new
        // head by collapsing the intersection to an empty region at mr.end().
        intersection = MemRegion(mr.end(), mr.end());
      }
      // All the sizes below are in words.
      size_t head_size = 0, tail_size = 0;
      if (mr.start() <= intersection.start()) {
        head_size = pointer_delta(intersection.start(), mr.start());
      }
      if(intersection.end() <= mr.end()) {
        tail_size = pointer_delta(mr.end(), intersection.end());
      }
      // Limit the amount of page manipulation if necessary.
      // NUMASpaceResizeRate bounds the bytes of page work per resize; the
      // budget (converted to words) is split between head and tail
      // proportionally to their sizes.
      if (NUMASpaceResizeRate > 0 && !AlwaysPreTouch) {
        const size_t change_size = head_size + tail_size;
        const float setup_rate_words = NUMASpaceResizeRate >> LogBytesPerWord;
        head_size = MIN2((size_t)(setup_rate_words * head_size / change_size),
                         head_size);
        tail_size = MIN2((size_t)(setup_rate_words * tail_size / change_size),
                         tail_size);
      }
      head = MemRegion(intersection.start() - head_size, intersection.start());
      tail = MemRegion(intersection.end(), intersection.end() + tail_size);
    }
    assert(mr.contains(head) && mr.contains(tail), "Sanity");

    if (UseNUMA) {
      numa_setup_pages(head, clear_space);
      numa_setup_pages(tail, clear_space);
    }

    if (AlwaysPreTouch) {
      pretouch_pages(head);
      pretouch_pages(tail);
    }

    // Remember where we stopped so that we can continue later.
    set_last_setup_region(MemRegion(head.start(), tail.end()));
  }

  set_bottom(mr.start());
  set_end(mr.end());

  if (clear_space) {
    clear(mangle_space);
  }
}
duke@435 132
jmasa@698 133 void MutableSpace::clear(bool mangle_space) {
duke@435 134 set_top(bottom());
jmasa@698 135 if (ZapUnusedHeapArea && mangle_space) {
jmasa@698 136 mangle_unused_area();
jmasa@698 137 }
duke@435 138 }
duke@435 139
jmasa@698 140 #ifndef PRODUCT
// Debug-only: verify the unused area up to 'limit' still carries the mangle
// pattern (i.e. nothing wrote into it without allocating).
void MutableSpace::check_mangled_unused_area(HeapWord* limit) {
  mangler()->check_mangled_unused_area(limit);
}
jmasa@698 144
// Debug-only: verify the entire unused area of the space is still mangled.
void MutableSpace::check_mangled_unused_area_complete() {
  mangler()->check_mangled_unused_area_complete();
}
jmasa@698 148
// Mangle only the unused space that has not previously
// been mangled and that has not been allocated since being
// mangled.
void MutableSpace::mangle_unused_area() {
  mangler()->mangle_unused_area();
}
jmasa@698 155
// Mangle the whole unused area regardless of what was mangled before.
void MutableSpace::mangle_unused_area_complete() {
  mangler()->mangle_unused_area_complete();
}
jmasa@698 159
// Mangle an arbitrary region; delegates to the shared static helper rather
// than this space's mangler since 'mr' need not lie inside the space.
void MutableSpace::mangle_region(MemRegion mr) {
  SpaceMangler::mangle_region(mr);
}
jmasa@698 163
// Record 'v' as the high-water mark the mangler uses to decide what counts
// as "unused" in subsequent checks.
void MutableSpace::set_top_for_allocations(HeapWord* v) {
  mangler()->set_top_for_allocations(v);
}
jmasa@698 167
// Convenience overload: use the current allocation top as the mangler's
// high-water mark.
void MutableSpace::set_top_for_allocations() {
  mangler()->set_top_for_allocations(top());
}
jmasa@698 171 #endif
jmasa@698 172
duke@435 173 // This version requires locking. */
duke@435 174 HeapWord* MutableSpace::allocate(size_t size) {
duke@435 175 assert(Heap_lock->owned_by_self() ||
duke@435 176 (SafepointSynchronize::is_at_safepoint() &&
duke@435 177 Thread::current()->is_VM_thread()),
duke@435 178 "not locked");
duke@435 179 HeapWord* obj = top();
duke@435 180 if (pointer_delta(end(), obj) >= size) {
duke@435 181 HeapWord* new_top = obj + size;
duke@435 182 set_top(new_top);
duke@435 183 assert(is_object_aligned((intptr_t)obj) && is_object_aligned((intptr_t)new_top),
duke@435 184 "checking alignment");
duke@435 185 return obj;
duke@435 186 } else {
duke@435 187 return NULL;
duke@435 188 }
duke@435 189 }
duke@435 190
duke@435 191 // This version is lock-free.
duke@435 192 HeapWord* MutableSpace::cas_allocate(size_t size) {
duke@435 193 do {
duke@435 194 HeapWord* obj = top();
duke@435 195 if (pointer_delta(end(), obj) >= size) {
duke@435 196 HeapWord* new_top = obj + size;
duke@435 197 HeapWord* result = (HeapWord*)Atomic::cmpxchg_ptr(new_top, top_addr(), obj);
duke@435 198 // result can be one of two:
duke@435 199 // the old top value: the exchange succeeded
duke@435 200 // otherwise: the new value of the top is returned.
duke@435 201 if (result != obj) {
duke@435 202 continue; // another thread beat us to the allocation, try again
duke@435 203 }
duke@435 204 assert(is_object_aligned((intptr_t)obj) && is_object_aligned((intptr_t)new_top),
duke@435 205 "checking alignment");
duke@435 206 return obj;
duke@435 207 } else {
duke@435 208 return NULL;
duke@435 209 }
duke@435 210 } while (true);
duke@435 211 }
duke@435 212
duke@435 213 // Try to deallocate previous allocation. Returns true upon success.
duke@435 214 bool MutableSpace::cas_deallocate(HeapWord *obj, size_t size) {
duke@435 215 HeapWord* expected_top = obj + size;
duke@435 216 return (HeapWord*)Atomic::cmpxchg_ptr(obj, top_addr(), expected_top) == expected_top;
duke@435 217 }
duke@435 218
coleenp@4037 219 void MutableSpace::oop_iterate(ExtendedOopClosure* cl) {
duke@435 220 HeapWord* obj_addr = bottom();
duke@435 221 HeapWord* t = top();
duke@435 222 // Could call objects iterate, but this is easier.
duke@435 223 while (obj_addr < t) {
duke@435 224 obj_addr += oop(obj_addr)->oop_iterate(cl);
duke@435 225 }
duke@435 226 }
duke@435 227
coleenp@4037 228 void MutableSpace::oop_iterate_no_header(OopClosure* cl) {
coleenp@4037 229 HeapWord* obj_addr = bottom();
coleenp@4037 230 HeapWord* t = top();
coleenp@4037 231 // Could call objects iterate, but this is easier.
coleenp@4037 232 while (obj_addr < t) {
coleenp@4037 233 obj_addr += oop(obj_addr)->oop_iterate_no_header(cl);
coleenp@4037 234 }
coleenp@4037 235 }
coleenp@4037 236
duke@435 237 void MutableSpace::object_iterate(ObjectClosure* cl) {
duke@435 238 HeapWord* p = bottom();
duke@435 239 while (p < top()) {
duke@435 240 cl->do_object(oop(p));
duke@435 241 p += oop(p)->size();
duke@435 242 }
duke@435 243 }
duke@435 244
duke@435 245 void MutableSpace::print_short() const { print_short_on(tty); }
duke@435 246 void MutableSpace::print_short_on( outputStream* st) const {
duke@435 247 st->print(" space " SIZE_FORMAT "K, %d%% used", capacity_in_bytes() / K,
duke@435 248 (int) ((double) used_in_bytes() * 100 / capacity_in_bytes()));
duke@435 249 }
duke@435 250
duke@435 251 void MutableSpace::print() const { print_on(tty); }
// Print the short summary followed by the space's [bottom, top, end)
// addresses. Calls print_short_on via explicit qualification so subclass
// overrides are bypassed.
void MutableSpace::print_on(outputStream* st) const {
  MutableSpace::print_short_on(st);
  st->print_cr(" [" INTPTR_FORMAT "," INTPTR_FORMAT "," INTPTR_FORMAT ")",
                 bottom(), top(), end());
}
duke@435 257
brutisso@3711 258 void MutableSpace::verify() {
duke@435 259 HeapWord* p = bottom();
duke@435 260 HeapWord* t = top();
duke@435 261 HeapWord* prev_p = NULL;
duke@435 262 while (p < t) {
duke@435 263 oop(p)->verify();
duke@435 264 prev_p = p;
duke@435 265 p += oop(p)->size();
duke@435 266 }
duke@435 267 guarantee(p == top(), "end of last object must match end of space");
duke@435 268 }

mercurial