Tue, 15 Apr 2014 17:02:20 -0400
8028497: SIGSEGV at ClassLoaderData::oops_do(OopClosure*, KlassClosure*, bool)
Summary: Keep class in CLD::_klasses list and mirror created for CDS classes if OOM during restore_unshareable_info(). This keeps pointers consistent for CMS.
Reviewed-by: ehelin, stefank, jmasa, iklam
1 /*
2 * Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "gc_implementation/shared/markSweep.inline.hpp"
29 #include "gc_interface/collectedHeap.inline.hpp"
30 #include "memory/genOopClosures.inline.hpp"
31 #include "memory/oopFactory.hpp"
32 #include "oops/instanceKlass.hpp"
33 #include "oops/instanceMirrorKlass.hpp"
34 #include "oops/instanceOop.hpp"
35 #include "oops/oop.inline.hpp"
36 #include "oops/symbol.hpp"
37 #include "runtime/handles.inline.hpp"
38 #include "utilities/macros.hpp"
39 #if INCLUDE_ALL_GCS
40 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
41 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
42 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
43 #include "gc_implementation/g1/g1RemSet.inline.hpp"
44 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
45 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
46 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
47 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
48 #include "oops/oop.pcgc.inline.hpp"
49 #endif // INCLUDE_ALL_GCS
51 int InstanceMirrorKlass::_offset_of_static_fields = 0;
#ifdef ASSERT
// Debug-only helpers passed as the assert_fn argument of the iterate macros
// below.  Each loads the (possibly compressed) oop at *p and, when non-NULL,
// asserts it lies in the expected part of the heap.
template <class T> void assert_is_in(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in(o), "should be in heap");
  }
}
template <class T> void assert_is_in_closed_subset(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_closed_subset(o), "should be in closed");
  }
}
template <class T> void assert_is_in_reserved(T *p) {
  T heap_oop = oopDesc::load_heap_oop(p);
  if (!oopDesc::is_null(heap_oop)) {
    oop o = oopDesc::decode_heap_oop_not_null(heap_oop);
    assert(Universe::heap()->is_in_reserved(o), "should be in reserved");
  }
}
// No-op variant for iterations that need no per-element verification.
template <class T> void assert_nothing(T *p) {}

#else
// Product builds: all of the checks compile away to nothing.
template <class T> void assert_is_in(T *p) {}
template <class T> void assert_is_in_closed_subset(T *p) {}
template <class T> void assert_is_in_reserved(T *p) {}
template <class T> void assert_nothing(T *p) {}
#endif // ASSERT
// Apply assert_fn and the do_oop statement to each of the 'count' oop fields
// of type T (oop or narrowOop) starting at start_p.  do_oop may reference the
// loop cursor 'p'.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                         \
  assert_fn)                                         \
{                                                    \
  T* p = (T*)(start_p);                              \
  T* const end = p + (count);                        \
  while (p < end) {                                  \
    (assert_fn)(p);                                  \
    do_oop;                                          \
    ++p;                                             \
  }                                                  \
}
// Same iteration as above, but clipped to the [low, high) memory region:
// the cursor starts no earlier than low and stops no later than high.
#define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
  T, start_p, count, low, high,                              \
  do_oop, assert_fn)                                         \
{                                                            \
  T* const l = (T*)(low);                                    \
  T* const h = (T*)(high);                                   \
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 &&         \
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,           \
         "bounded region must be properly aligned");         \
  T* p = (T*)(start_p);                                      \
  T* end = p + (count);                                      \
  if (p < l) p = l;                                          \
  if (end > h) end = h;                                      \
  while (p < end) {                                          \
    (assert_fn)(p);                                          \
    do_oop;                                                  \
    ++p;                                                     \
  }                                                          \
}
// Dispatch to the narrowOop or oop specialization of the unbounded iterate
// macro, depending on the UseCompressedOops flag.
#define InstanceMirrorKlass_OOP_ITERATE(start_p, count,    \
                                        do_oop, assert_fn) \
{                                                          \
  if (UseCompressedOops) {                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  } else {                                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(oop,       \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  }                                                        \
}
// The following macros call specialized macros, passing either oop or
// narrowOop as the specialization type.  These test the UseCompressedOops
// flag.
#define InstanceMirrorKlass_BOUNDED_OOP_ITERATE(start_p, count, low, high, \
                                                do_oop, assert_fn)         \
{                                                                          \
  if (UseCompressedOops) {                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop,         \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  } else {                                                                 \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,               \
      start_p, count,                                                      \
      low, high,                                                           \
      do_oop, assert_fn)                                                   \
  }                                                                        \
}
// Serial mark-sweep: mark everything reachable from a mirror — the regular
// instance fields, the klass the mirror represents, and the static oop
// fields embedded in the mirror itself.
void InstanceMirrorKlass::oop_follow_contents(oop obj) {
  InstanceKlass::oop_follow_contents(obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // An anonymous class doesn't have its own class loader, so the call
    // to follow_klass will mark and push its java mirror instead of the
    // class loader. When handling the java mirror for an anonymous class
    // we need to make sure its class loader data is claimed, this is done
    // by calling follow_class_loader explicitly. For non-anonymous classes
    // the call to follow_class_loader is made when the class loader itself
    // is handled.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      MarkSweep::follow_class_loader(klass->class_loader_data());
    } else {
      MarkSweep::follow_klass(klass);
    }
  } else {
    // If klass is NULL then this is a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push each static oop field stored in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                             \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \
    MarkSweep::mark_and_push(p),                                               \
    assert_is_in_closed_subset)
}
#if INCLUDE_ALL_GCS
// Parallel-compact variant of oop_follow_contents: same traversal as the
// serial version above, routed through PSParallelCompact with a per-thread
// ParCompactionManager.
void InstanceMirrorKlass::oop_follow_contents(ParCompactionManager* cm,
                                              oop obj) {
  InstanceKlass::oop_follow_contents(cm, obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    // An anonymous class doesn't have its own class loader, so the call
    // to follow_klass will mark and push its java mirror instead of the
    // class loader. When handling the java mirror for an anonymous class
    // we need to make sure its class loader data is claimed, this is done
    // by calling follow_class_loader explicitly. For non-anonymous classes
    // the call to follow_class_loader is made when the class loader itself
    // is handled.
    if (klass->oop_is_instance() && InstanceKlass::cast(klass)->is_anonymous()) {
      PSParallelCompact::follow_class_loader(cm, klass->class_loader_data());
    } else {
      PSParallelCompact::follow_klass(cm, klass);
    }
  } else {
    // If klass is NULL then this is a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push each static oop field stored in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                             \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \
    PSParallelCompact::mark_and_push(cm, p),                                   \
    assert_is_in)
}
#endif // INCLUDE_ALL_GCS
// Mark-sweep pointer-adjustment phase: update the instance fields via the
// superclass, then the static oop fields embedded in the mirror.
// Returns the size of the mirror in words.
int InstanceMirrorKlass::oop_adjust_pointers(oop obj) {
  // Capture the size before any pointers are updated.
  int size = oop_size(obj);
  InstanceKlass::oop_adjust_pointers(obj);

  InstanceMirrorKlass_OOP_ITERATE(                                             \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \
    MarkSweep::adjust_pointer(p),                                              \
    assert_nothing)
  return size;
}
// Body fragment (no surrounding braces) shared by the oop_oop_iterate
// definitions below: applies the closure to every static oop field of the
// mirror, then returns the mirror's size in words.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(T, nv_suffix)         \
  InstanceMirrorKlass_OOP_ITERATE(                                             \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \
      (closure)->do_oop##nv_suffix(p),                                         \
    assert_is_in_closed_subset)                                                \
  return oop_size(obj);                                                        \
// Bounded counterpart of the fragment above: only static oop fields that
// fall inside the MemRegion 'mr' are passed to the closure.
#define InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(T, nv_suffix, mr)  \
  InstanceMirrorKlass_BOUNDED_OOP_ITERATE(                                     \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \
    mr.start(), mr.end(),                                                      \
    (closure)->do_oop##nv_suffix(p),                                           \
    assert_is_in_closed_subset)                                                \
  return oop_size(obj);                                                        \
// Asserts that the closure's non-virtual do_metadata answer agrees with the
// virtual one, then guards the statement/block that follows on that answer.
#define if_do_metadata_checked(closure, nv_suffix)                    \
  /* Make sure the non-virtual and the virtual versions match. */     \
  assert(closure->do_metadata##nv_suffix() == closure->do_metadata(), \
         "Inconsistency in do_metadata");                             \
  if (closure->do_metadata##nv_suffix())
// Macro to define InstanceMirrorKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures.  Macros calling macros above for each oop size.
// The generated function visits the instance fields (via InstanceKlass),
// optionally the mirror's klass (when the closure wants metadata), and
// finally the embedded static oop fields; it returns the mirror size.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)      \
                                                                                 \
int InstanceMirrorKlass::                                                        \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                   \
  /* Get size before changing pointers */                                        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \
                                                                                 \
  InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);                       \
                                                                                 \
  if_do_metadata_checked(closure, nv_suffix) {                                   \
    Klass* klass = java_lang_Class::as_Klass(obj);                               \
    /* We'll get NULL for primitive mirrors. */                                  \
    if (klass != NULL) {                                                         \
      closure->do_klass##nv_suffix(klass);                                       \
    }                                                                            \
  }                                                                              \
                                                                                 \
  if (UseCompressedOops) {                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);      \
  } else {                                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);            \
  }                                                                              \
}
#if INCLUDE_ALL_GCS
// Backwards-iteration variant used by some collectors.  Note that, unlike
// the forward variant above, it does not visit the mirror's klass (no
// if_do_metadata_checked block) — only instance and static oop fields.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                 \
int InstanceMirrorKlass::                                                        \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {         \
  /* Get size before changing pointers */                                        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \
                                                                                 \
  InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure);             \
                                                                                 \
  if (UseCompressedOops) {                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);      \
  } else {                                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);            \
  }                                                                              \
}
#endif // INCLUDE_ALL_GCS
// MemRegion-bounded variant: only oops inside 'mr' are visited, and the
// mirror's klass is visited only if the mirror itself lies within 'mr'.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)    \
                                                                                 \
int InstanceMirrorKlass::                                                        \
oop_oop_iterate##nv_suffix##_m(oop obj,                                          \
                               OopClosureType* closure,                          \
                               MemRegion mr) {                                   \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \
                                                                                 \
  InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);               \
                                                                                 \
  if_do_metadata_checked(closure, nv_suffix) {                                   \
    if (mr.contains(obj)) {                                                      \
      Klass* klass = java_lang_Class::as_Klass(obj);                             \
      /* We'll get NULL for primitive mirrors. */                                \
      if (klass != NULL) {                                                       \
        closure->do_klass##nv_suffix(klass);                                     \
      }                                                                          \
    }                                                                            \
  }                                                                              \
                                                                                 \
  if (UseCompressedOops) {                                                       \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr); \
  } else {                                                                       \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr);     \
  }                                                                              \
}
// Instantiate the oop_oop_iterate functions (forward, backwards, and
// MemRegion-bounded) for every closure type in the specialized closure lists.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
#if INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // INCLUDE_ALL_GCS
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
#if INCLUDE_ALL_GCS
// Parallel scavenge: push the mirror's static oop fields that still point
// into the space being scavenged onto the promotion manager's work queue.
void InstanceMirrorKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  // Note that we don't have to follow the mirror -> klass pointer, since all
  // klasses that are dirty will be scavenged when we iterate over the
  // ClassLoaderData objects.

  InstanceKlass::oop_push_contents(pm, obj);
  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    if (PSScavenge::should_scavenge(p)) {                                     \
      pm->claim_or_forward_depth(p);                                          \
    },                                                                        \
    assert_nothing )
}

// Parallel-compact pointer-update phase: adjust the instance fields via the
// superclass, then the static oop fields.  Returns the mirror size in words.
int InstanceMirrorKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  // Capture the size before any pointers are updated.
  int size = oop_size(obj);
  InstanceKlass::oop_update_pointers(cm, obj);

  InstanceMirrorKlass_OOP_ITERATE(                                            \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    PSParallelCompact::adjust_pointer(p),                                     \
    assert_nothing)
  return size;
}
#endif // INCLUDE_ALL_GCS
359 int InstanceMirrorKlass::instance_size(KlassHandle k) {
360 if (k() != NULL && k->oop_is_instance()) {
361 return align_object_size(size_helper() + InstanceKlass::cast(k())->static_field_size());
362 }
363 return size_helper();
364 }
// Allocate a new mirror instance for klass 'k'.  Returns NULL and pends an
// exception on allocation failure (CHECK_NULL).
instanceOop InstanceMirrorKlass::allocate_instance(KlassHandle k, TRAPS) {
  // Query before forming handle.
  int size = instance_size(k);
  KlassHandle h_k(THREAD, this);
  instanceOop i = (instanceOop)CollectedHeap::obj_allocate(h_k, size, CHECK_NULL);

  // Since mirrors can be variable sized because of the static fields, store
  // the size in the mirror itself.
  java_lang_Class::set_oop_size(i, size);

  return i;
}
// Mirrors are variable sized, so the size is read back from the field that
// allocate_instance stored into the mirror itself.
int InstanceMirrorKlass::oop_size(oop obj) const {
  return java_lang_Class::oop_size(obj);
}
383 int InstanceMirrorKlass::compute_static_oop_field_count(oop obj) {
384 Klass* k = java_lang_Class::as_Klass(obj);
385 if (k != NULL && k->oop_is_instance()) {
386 return InstanceKlass::cast(k)->static_oop_field_count();
387 }
388 return 0;
389 }