src/share/vm/gc_implementation/shared/mutableSpace.cpp

/*
 * Copyright (c) 2001, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "utilities/macros.hpp"
#if INCLUDE_ALL_GCS
#include "gc_implementation/shared/mutableSpace.hpp"
#include "gc_implementation/shared/spaceDecorator.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/safepoint.hpp"
#include "runtime/thread.hpp"
#endif // INCLUDE_ALL_GCS

PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC

MutableSpace::MutableSpace(size_t alignment): ImmutableSpace(), _top(NULL), _alignment(alignment) {
  // alignment() is a size_t, so a ">= 0" check would be a tautology;
  // only the page-size multiple is worth asserting.
  assert(MutableSpace::alignment() % os::vm_page_size() == 0,
         "Space should be aligned");
  _mangler = new MutableSpaceMangler(this);
}

MutableSpace::~MutableSpace() {
  delete _mangler;
}

void MutableSpace::numa_setup_pages(MemRegion mr, bool clear_space) {
  if (!mr.is_empty()) {
    size_t page_size = UseLargePages ? alignment() : os::vm_page_size();
    HeapWord *start = (HeapWord*)round_to((intptr_t) mr.start(), page_size);
    HeapWord *end   = (HeapWord*)round_down((intptr_t) mr.end(), page_size);
    if (end > start) {
      size_t size = pointer_delta(end, start, sizeof(char));
      if (clear_space) {
        // Prefer page reallocation to migration.
        os::free_memory((char*)start, size, page_size);
      }
      os::numa_make_global((char*)start, size);
    }
  }
}
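
// Illustrative note (editorial, not from the original source): NUMA page
// placement can only be changed for whole pages, so the region is first
// shrunk inward to page boundaries. For example, with a 4K page size a
// region [0x1100, 0x3f00) reduces to the fully covered pages
// [0x2000, 0x3000); if no whole page fits inside the region, nothing is done.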

void MutableSpace::pretouch_pages(MemRegion mr) {
  for (volatile char *p = (char*)mr.start(); p < (char*)mr.end(); p += os::vm_page_size()) {
    char t = *p; *p = t;
  }
}
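
// Illustrative note (editorial, not from the original source): reading and
// writing back one byte per page forces the OS to commit a physical page
// behind every virtual page of the region up front; the volatile pointer
// keeps the compiler from optimizing the otherwise useless load/store away.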

void MutableSpace::initialize(MemRegion mr,
                              bool clear_space,
                              bool mangle_space,
                              bool setup_pages) {

  assert(Universe::on_page_boundary(mr.start()) && Universe::on_page_boundary(mr.end()),
         "invalid space boundaries");

  if (setup_pages && (UseNUMA || AlwaysPreTouch)) {
    // The space may move left and right or expand/shrink.
    // We'd like to enforce the desired page placement.
    MemRegion head, tail;
    if (last_setup_region().is_empty()) {
      // If it's the first initialization don't limit the amount of work.
      head = mr;
      tail = MemRegion(mr.end(), mr.end());
    } else {
      // Is there an intersection with the address space?
      MemRegion intersection = last_setup_region().intersection(mr);
      if (intersection.is_empty()) {
        intersection = MemRegion(mr.end(), mr.end());
      }
      // All the sizes below are in words.
      size_t head_size = 0, tail_size = 0;
      if (mr.start() <= intersection.start()) {
        head_size = pointer_delta(intersection.start(), mr.start());
      }
      if (intersection.end() <= mr.end()) {
        tail_size = pointer_delta(mr.end(), intersection.end());
      }
      // Limit the amount of page manipulation if necessary.
      if (NUMASpaceResizeRate > 0 && !AlwaysPreTouch) {
        const size_t change_size = head_size + tail_size;
        const float setup_rate_words = NUMASpaceResizeRate >> LogBytesPerWord;
        head_size = MIN2((size_t)(setup_rate_words * head_size / change_size),
                         head_size);
        tail_size = MIN2((size_t)(setup_rate_words * tail_size / change_size),
                         tail_size);
      }
      head = MemRegion(intersection.start() - head_size, intersection.start());
      tail = MemRegion(intersection.end(), intersection.end() + tail_size);
    }
    assert(mr.contains(head) && mr.contains(tail), "Sanity");

    if (UseNUMA) {
      numa_setup_pages(head, clear_space);
      numa_setup_pages(tail, clear_space);
    }

    if (AlwaysPreTouch) {
      pretouch_pages(head);
      pretouch_pages(tail);
    }

    // Remember where we stopped so that we can continue later.
    set_last_setup_region(MemRegion(head.start(), tail.end()));
  }

  set_bottom(mr.start());
  set_end(mr.end());

  if (clear_space) {
    clear(mangle_space);
  }
}
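
// Illustrative note (editorial, not from the original source): only the
// parts of mr not covered by the previous setup (the "head" before and the
// "tail" after the intersection with last_setup_region()) need page work.
// When NUMASpaceResizeRate is set and AlwaysPreTouch is off, both parts are
// scaled down proportionally so that roughly NUMASpaceResizeRate bytes of
// pages are touched per resize. For example, with NUMASpaceResizeRate = 8M
// (1M words on a 64-bit VM), head_size = 3M words and tail_size = 1M words
// would be limited to about 768K and 256K words respectively.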

void MutableSpace::clear(bool mangle_space) {
  set_top(bottom());
  if (ZapUnusedHeapArea && mangle_space) {
    mangle_unused_area();
  }
}

#ifndef PRODUCT
void MutableSpace::check_mangled_unused_area(HeapWord* limit) {
  mangler()->check_mangled_unused_area(limit);
}

void MutableSpace::check_mangled_unused_area_complete() {
  mangler()->check_mangled_unused_area_complete();
}

// Mangle only the unused space that has not previously
// been mangled and that has not been allocated since being
// mangled.
void MutableSpace::mangle_unused_area() {
  mangler()->mangle_unused_area();
}

void MutableSpace::mangle_unused_area_complete() {
  mangler()->mangle_unused_area_complete();
}

void MutableSpace::mangle_region(MemRegion mr) {
  SpaceMangler::mangle_region(mr);
}

void MutableSpace::set_top_for_allocations(HeapWord* v) {
  mangler()->set_top_for_allocations(v);
}

void MutableSpace::set_top_for_allocations() {
  mangler()->set_top_for_allocations(top());
}
#endif

// This version requires locking.
HeapWord* MutableSpace::allocate(size_t size) {
  assert(Heap_lock->owned_by_self() ||
         (SafepointSynchronize::is_at_safepoint() &&
          Thread::current()->is_VM_thread()),
         "not locked");
  HeapWord* obj = top();
  if (pointer_delta(end(), obj) >= size) {
    HeapWord* new_top = obj + size;
    set_top(new_top);
    assert(is_object_aligned((intptr_t)obj) && is_object_aligned((intptr_t)new_top),
           "checking alignment");
    return obj;
  } else {
    return NULL;
  }
}
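
// Usage sketch (illustrative, not part of the original file; the variable
// names are assumed): callers must hold the Heap_lock or be the VM thread
// at a safepoint, and size is given in HeapWords:
//
//   MutexLocker ml(Heap_lock);
//   HeapWord* p = space->allocate(word_size);
//   if (p == NULL) {
//     // the space is full; the caller has to expand it or collect
//   }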

// This version is lock-free.
HeapWord* MutableSpace::cas_allocate(size_t size) {
  do {
    HeapWord* obj = top();
    if (pointer_delta(end(), obj) >= size) {
      HeapWord* new_top = obj + size;
      HeapWord* result = (HeapWord*)Atomic::cmpxchg_ptr(new_top, top_addr(), obj);
      // result can be one of two values:
      //   the old top value: the exchange succeeded
      //   otherwise: the current top is returned, i.e. another thread
      //              changed top in the meantime
      if (result != obj) {
        continue; // another thread beat us to the allocation, try again
      }
      assert(is_object_aligned((intptr_t)obj) && is_object_aligned((intptr_t)new_top),
             "checking alignment");
      return obj;
    } else {
      return NULL;
    }
  } while (true);
}
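
// Illustrative note (editorial, not from the original source): unlike
// allocate() above, cas_allocate() may be called by several threads
// concurrently without holding the Heap_lock. It keeps retrying the
// compare-and-swap on top until it either installs its new top (success)
// or finds the remaining space smaller than the request (returns NULL).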

// Try to deallocate previous allocation. Returns true upon success.
bool MutableSpace::cas_deallocate(HeapWord *obj, size_t size) {
  HeapWord* expected_top = obj + size;
  return (HeapWord*)Atomic::cmpxchg_ptr(obj, top_addr(), expected_top) == expected_top;
}
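
// Usage sketch (illustrative, not part of the original file; the variable
// names are assumed): the rollback only succeeds while the block is still
// the most recent allocation, i.e. while top still equals obj + size:
//
//   HeapWord* p = space->cas_allocate(word_size);
//   if (p != NULL && !still_needed) {
//     bool undone = space->cas_deallocate(p, word_size);
//     // if another thread allocated after us, undone is false and the
//     // caller must keep treating the block as allocated
//   }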

void MutableSpace::oop_iterate(ExtendedOopClosure* cl) {
  HeapWord* obj_addr = bottom();
  HeapWord* t = top();
  // Could call object_iterate(), but this is easier.
  while (obj_addr < t) {
    obj_addr += oop(obj_addr)->oop_iterate(cl);
  }
}

void MutableSpace::oop_iterate_no_header(OopClosure* cl) {
  HeapWord* obj_addr = bottom();
  HeapWord* t = top();
  // Could call object_iterate(), but this is easier.
  while (obj_addr < t) {
    obj_addr += oop(obj_addr)->oop_iterate_no_header(cl);
  }
}

void MutableSpace::object_iterate(ObjectClosure* cl) {
  HeapWord* p = bottom();
  while (p < top()) {
    cl->do_object(oop(p));
    p += oop(p)->size();
  }
}
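
// Illustrative note (editorial, not from the original source): all three
// iterators rely on the space being one contiguous run of parsable objects
// between bottom() and top(); each step advances by the size of the current
// object, so the walk only works while that invariant holds.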

void MutableSpace::print_short() const { print_short_on(tty); }
void MutableSpace::print_short_on(outputStream* st) const {
  st->print(" space " SIZE_FORMAT "K, %d%% used", capacity_in_bytes() / K,
            (int) ((double) used_in_bytes() * 100 / capacity_in_bytes()));
}

void MutableSpace::print() const { print_on(tty); }
void MutableSpace::print_on(outputStream* st) const {
  MutableSpace::print_short_on(st);
  st->print_cr(" [" INTPTR_FORMAT "," INTPTR_FORMAT "," INTPTR_FORMAT ")",
               bottom(), top(), end());
}

void MutableSpace::verify() {
  HeapWord* p = bottom();
  HeapWord* t = top();
  HeapWord* prev_p = NULL;
  while (p < t) {
    oop(p)->verify();
    prev_p = p;
    p += oop(p)->size();
  }
  guarantee(p == top(), "end of last object must match end of space");
}
