Mon, 29 Apr 2013 16:13:57 -0400
8011773: Some tests on Interned String crashed JVM with OOM
Summary: Instead of terminating the VM, throw OutOfMemoryError exceptions.
Reviewed-by: coleenp, dholmes
1 /*
2 * Copyright (c) 2011, 2012, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "gc_implementation/shared/markSweep.inline.hpp"
29 #include "gc_interface/collectedHeap.inline.hpp"
30 #include "memory/genOopClosures.inline.hpp"
31 #include "memory/oopFactory.hpp"
32 #include "oops/instanceKlass.hpp"
33 #include "oops/instanceMirrorKlass.hpp"
34 #include "oops/instanceOop.hpp"
35 #include "oops/oop.inline.hpp"
36 #include "oops/symbol.hpp"
37 #include "runtime/handles.inline.hpp"
38 #include "utilities/macros.hpp"
39 #if INCLUDE_ALL_GCS
40 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
41 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
42 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
43 #include "gc_implementation/g1/g1RemSet.inline.hpp"
44 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
45 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
46 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
47 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
48 #include "oops/oop.pcgc.inline.hpp"
49 #endif // INCLUDE_ALL_GCS
// Offset into a java.lang.Class mirror at which the mirrored class's static
// fields begin.  Assigned during VM startup outside this file; 0 until then.
int InstanceMirrorKlass::_offset_of_static_fields = 0;
#ifdef ASSERT
// Debug-only iteration hooks.  Each loads the (possibly narrow) oop slot at p
// and, for a non-NULL value, asserts that the decoded oop lies in the
// indicated portion of the heap.  In product builds (below) they are no-ops.
template <class T> void assert_is_in(T *p) {
  T slot = oopDesc::load_heap_oop(p);
  if (oopDesc::is_null(slot)) return;
  oop decoded = oopDesc::decode_heap_oop_not_null(slot);
  assert(Universe::heap()->is_in(decoded), "should be in heap");
}
template <class T> void assert_is_in_closed_subset(T *p) {
  T slot = oopDesc::load_heap_oop(p);
  if (oopDesc::is_null(slot)) return;
  oop decoded = oopDesc::decode_heap_oop_not_null(slot);
  assert(Universe::heap()->is_in_closed_subset(decoded), "should be in closed");
}
template <class T> void assert_is_in_reserved(T *p) {
  T slot = oopDesc::load_heap_oop(p);
  if (oopDesc::is_null(slot)) return;
  oop decoded = oopDesc::decode_heap_oop_not_null(slot);
  assert(Universe::heap()->is_in_reserved(decoded), "should be in reserved");
}
// Used where no containment check is wanted (e.g. during pointer adjustment).
template <class T> void assert_nothing(T *p) {}

#else
// Product builds: all hooks compile to nothing.
template <class T> void assert_is_in(T *p) {}
template <class T> void assert_is_in_closed_subset(T *p) {}
template <class T> void assert_is_in_reserved(T *p) {}
template <class T> void assert_nothing(T *p) {}
#endif // ASSERT
// Walks 'count' fields of type T starting at start_p, calling assert_fn on
// each slot address and then evaluating do_oop with the slot pointer bound
// to 'p'.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                         \
  assert_fn)                                         \
{                                                    \
  T* p         = (T*)(start_p);                      \
  T* const end = p + (count);                        \
  while (p < end) {                                  \
    (assert_fn)(p);                                  \
    do_oop;                                          \
    ++p;                                             \
  }                                                  \
}
// Bounded form of the iteration above: the [start_p, start_p+count) range is
// first clipped to [low, high), which must be T-aligned, before each
// surviving slot is visited.
#define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
  T, start_p, count, low, high,                              \
  do_oop, assert_fn)                                         \
{                                                            \
  T* const l = (T*)(low);                                    \
  T* const h = (T*)(high);                                   \
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 &&         \
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,           \
         "bounded region must be properly aligned");         \
  T* p   = (T*)(start_p);                                    \
  T* end = p + (count);                                      \
  if (p < l)   p   = l;                                      \
  if (end > h) end = h;                                      \
  while (p < end) {                                          \
    (assert_fn)(p);                                          \
    do_oop;                                                  \
    ++p;                                                     \
  }                                                          \
}
// Dispatches the specialized iteration on the UseCompressedOops flag,
// choosing narrowOop or oop as the slot type.
#define InstanceMirrorKlass_OOP_ITERATE(start_p, count,     \
                                        do_oop, assert_fn)  \
{                                                           \
  if (UseCompressedOops) {                                  \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(narrowOop,  \
      start_p, count,                                       \
      do_oop, assert_fn)                                    \
  } else {                                                  \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(oop,        \
      start_p, count,                                       \
      do_oop, assert_fn)                                    \
  }                                                         \
}
// The following macros call specialized macros, passing either oop or
// narrowOop as the specialization type.  These test the UseCompressedOops
// flag.
// Bounded counterpart of InstanceMirrorKlass_OOP_ITERATE: same
// UseCompressedOops dispatch, clipped to the [low, high) region.
#define InstanceMirrorKlass_BOUNDED_OOP_ITERATE(start_p, count, low, high, \
                                                do_oop, assert_fn)         \
{                                                                          \
  if (UseCompressedOops) {                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,         \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  } else {                                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,               \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  }                                                                        \
}
// Serial MarkSweep: mark everything reachable from a java.lang.Class mirror —
// the regular instance fields, the mirrored klass, and the mirror's embedded
// static oop fields.
void InstanceMirrorKlass::oop_follow_contents(oop obj) {
  InstanceKlass::oop_follow_contents(obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    MarkSweep::follow_klass(klass);
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push each static oop field stored in the mirror itself.
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    MarkSweep::mark_and_push(p),                                              \
    assert_is_in_closed_subset)
}
#if INCLUDE_ALL_GCS
// Parallel compact marking: same traversal as the serial variant above, but
// all marking goes through the per-thread ParCompactionManager.
void InstanceMirrorKlass::oop_follow_contents(ParCompactionManager* cm,
                                              oop obj) {
  InstanceKlass::oop_follow_contents(cm, obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    PSParallelCompact::follow_klass(cm, klass);
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push each static oop field stored in the mirror itself.
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    PSParallelCompact::mark_and_push(cm, p),                                  \
    assert_is_in)
}
#endif // INCLUDE_ALL_GCS
// Serial MarkSweep pointer adjustment for a mirror: updates the instance
// fields, the klass reference, and the embedded static oop fields.
// Returns the mirror's size in words.
int InstanceMirrorKlass::oop_adjust_pointers(oop obj) {
  // Capture the size before any pointers are updated.
  int size = oop_size(obj);
  InstanceKlass::oop_adjust_pointers(obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    MarkSweep::adjust_klass(klass);
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // No containment assert here: objects may be mid-move during compaction.
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    MarkSweep::adjust_pointer(p),                                             \
    assert_nothing)
  return size;
}
// Shared tail for the oop_oop_iterate##nv_suffix bodies: applies the closure
// to each static oop field of the mirror and returns the mirror's size.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(T, nv_suffix)        \
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
      (closure)->do_oop##nv_suffix(p),                                        \
    assert_is_in_closed_subset)                                               \
  return oop_size(obj);                                                       \

// Bounded counterpart of the tail above: only static oop fields falling
// inside the MemRegion mr are passed to the closure.
#define InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(T, nv_suffix, mr) \
  InstanceMirrorKlass_BOUNDED_OOP_ITERATE(                                    \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    mr.start(), mr.end(),                                                     \
      (closure)->do_oop##nv_suffix(p),                                        \
    assert_is_in_closed_subset)                                               \
  return oop_size(obj);                                                       \

// Guards a trailing block so it runs only when the closure asks for metadata
// to be visited; also asserts the non-virtual and virtual do_metadata
// answers agree.
#define if_do_metadata_checked(closure, nv_suffix)                    \
  /* Make sure the non-virtual and the virtual versions match. */     \
  assert(closure->do_metadata##nv_suffix() == closure->do_metadata(), \
         "Inconsistency in do_metadata");                             \
  if (closure->do_metadata##nv_suffix())
// Macro to define InstanceMirrorKlass::oop_oop_iterate for virtual/nonvirtual
// for all closures.  Macros calling macros above for each oop size.
// Defines one oop_oop_iterate##nv_suffix method per closure type: iterates
// the instance fields, optionally the mirrored klass (metadata), and the
// static oop fields; returns the mirror's size.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)      \
                                                                                 \
int InstanceMirrorKlass::                                                        \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                   \
  /* Get size before changing pointers */                                        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \
                                                                                 \
  InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);                       \
                                                                                 \
  /* Visit the klass behind the mirror only if the closure wants metadata. */    \
  if_do_metadata_checked(closure, nv_suffix) {                                   \
    Klass* klass = java_lang_Class::as_Klass(obj);                               \
    /* We'll get NULL for primitive mirrors. */                                  \
    if (klass != NULL) {                                                         \
      closure->do_klass##nv_suffix(klass);                                       \
    }                                                                            \
  }                                                                              \
                                                                                 \
  /* Finally visit the static oop fields; this also returns the size. */         \
  if (UseCompressedOops) {                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);      \
  } else {                                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);            \
  }                                                                              \
}
#if INCLUDE_ALL_GCS
// Backwards-iteration variant; unlike the forward variant it does not visit
// the mirrored klass (no if_do_metadata_checked block).
#define InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                 \
int InstanceMirrorKlass::                                                        \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {         \
  /* Get size before changing pointers */                                        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \
                                                                                 \
  InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure);             \
                                                                                 \
  /* Visit the static oop fields; this also returns the size. */                 \
  if (UseCompressedOops) {                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);      \
  } else {                                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);            \
  }                                                                              \
}
#endif // INCLUDE_ALL_GCS
// Bounded (MemRegion-restricted) variant: the mirrored klass is visited only
// when the mirror itself lies in mr, and only static fields within mr are
// passed to the closure.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)     \
                                                                                  \
int InstanceMirrorKlass::                                                         \
oop_oop_iterate##nv_suffix##_m(oop obj,                                           \
                               OopClosureType* closure,                           \
                               MemRegion mr) {                                    \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk);  \
                                                                                  \
  InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);                \
                                                                                  \
  if_do_metadata_checked(closure, nv_suffix) {                                    \
    if (mr.contains(obj)) {                                                       \
      Klass* klass = java_lang_Class::as_Klass(obj);                              \
      /* We'll get NULL for primitive mirrors. */                                 \
      if (klass != NULL) {                                                        \
        closure->do_klass##nv_suffix(klass);                                      \
      }                                                                           \
    }                                                                             \
  }                                                                               \
                                                                                  \
  /* Visit the in-region static oop fields; this also returns the size. */        \
  if (UseCompressedOops) {                                                        \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr);\
  } else {                                                                        \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr);      \
  }                                                                               \
}
// Instantiate the oop_oop_iterate* method families for every closure type in
// the two closure lists.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
#if INCLUDE_ALL_GCS
// Backwards variants are only needed by the non-serial collectors.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS
// Bounded (MemRegion) variants.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
#if INCLUDE_ALL_GCS
// ParallelScavenge: push each scavengeable static-field oop of the mirror
// onto the promotion manager's depth-first work stack.
void InstanceMirrorKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  // Note that we don't have to follow the mirror -> klass pointer, since all
  // klasses that are dirty will be scavenged when we iterate over the
  // ClassLoaderData objects.

  InstanceKlass::oop_push_contents(pm, obj);
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    if (PSScavenge::should_scavenge(p)) {                                     \
      pm->claim_or_forward_depth(p);                                          \
    },                                                                        \
    assert_nothing )
}
// Parallel compact pointer update for a mirror: updates the instance fields,
// the klass reference, and the embedded static oop fields.  Returns the
// mirror's size in words (captured before any pointers change).
int InstanceMirrorKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  int size = oop_size(obj);
  InstanceKlass::oop_update_pointers(cm, obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    PSParallelCompact::adjust_klass(cm, klass);
  } else {
    // If klass is NULL then this a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // No containment assert: referents may be mid-move during compaction.
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    PSParallelCompact::adjust_pointer(p),                                     \
    assert_nothing)
  return size;
}
#endif // INCLUDE_ALL_GCS
359 int InstanceMirrorKlass::instance_size(KlassHandle k) {
360 if (k() != NULL && k->oop_is_instance()) {
361 return align_object_size(size_helper() + InstanceKlass::cast(k())->static_field_size());
362 }
363 return size_helper();
364 }
// Allocates a new java.lang.Class mirror for klass k, sized to include k's
// static fields.  On allocation failure CHECK_NULL returns NULL to the
// caller with the pending exception (e.g. OutOfMemoryError) set.
instanceOop InstanceMirrorKlass::allocate_instance(KlassHandle k, TRAPS) {
  // Query before forming handle.
  int size = instance_size(k);
  KlassHandle h_k(THREAD, this);
  instanceOop i = (instanceOop) CollectedHeap::Class_obj_allocate(h_k, size, k, CHECK_NULL);
  return i;
}
// Mirrors vary in size (see instance_size), so the size is read back from
// the java.lang.Class instance itself.
int InstanceMirrorKlass::oop_size(oop obj) const {
  return java_lang_Class::oop_size(obj);
}
378 int InstanceMirrorKlass::compute_static_oop_field_count(oop obj) {
379 Klass* k = java_lang_Class::as_Klass(obj);
380 if (k != NULL && k->oop_is_instance()) {
381 return InstanceKlass::cast(k)->static_oop_field_count();
382 }
383 return 0;
384 }