Wed, 02 Jan 2013 20:28:09 -0500
8005494: SIGSEGV in Rewriter::relocate_and_link() when testing Weblogic with CompressedOops and KlassPtrs
Summary: Relocate functions containing jsr's at rewriting time, so that relocation is not repeated after the shared archive is read
Reviewed-by: twisti, jrose
1 /*
2 * Copyright (c) 2011, 2012, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #include "precompiled.hpp"
26 #include "classfile/javaClasses.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "gc_implementation/shared/markSweep.inline.hpp"
29 #include "gc_interface/collectedHeap.inline.hpp"
30 #include "memory/genOopClosures.inline.hpp"
31 #include "memory/oopFactory.hpp"
32 #include "oops/instanceKlass.hpp"
33 #include "oops/instanceMirrorKlass.hpp"
34 #include "oops/instanceOop.hpp"
35 #include "oops/oop.inline.hpp"
36 #include "oops/symbol.hpp"
37 #include "runtime/handles.inline.hpp"
38 #ifndef SERIALGC
39 #include "gc_implementation/concurrentMarkSweep/cmsOopClosures.inline.hpp"
40 #include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
41 #include "gc_implementation/g1/g1OopClosures.inline.hpp"
42 #include "gc_implementation/g1/g1RemSet.inline.hpp"
43 #include "gc_implementation/g1/heapRegionSeq.inline.hpp"
44 #include "gc_implementation/parNew/parOopClosures.inline.hpp"
45 #include "gc_implementation/parallelScavenge/psPromotionManager.inline.hpp"
46 #include "gc_implementation/parallelScavenge/psScavenge.inline.hpp"
47 #include "oops/oop.pcgc.inline.hpp"
48 #endif
// Byte offset of the static-field block within a mirror object.
// 0 until initialized (initialization is not in this file — presumably
// done once during bootstrap; confirm against the setter's caller).
int InstanceMirrorKlass::_offset_of_static_fields = 0;
#ifdef ASSERT
// Debug-only helpers, passed to the iteration macros below as their
// "assert_fn" argument: each loads the (possibly narrow) oop at *p and,
// when non-null, checks it against a heap containment predicate.
template <class T> void assert_is_in(T *p) {
  T raw = oopDesc::load_heap_oop(p);
  if (oopDesc::is_null(raw)) return;
  oop obj = oopDesc::decode_heap_oop_not_null(raw);
  assert(Universe::heap()->is_in(obj), "should be in heap");
}
template <class T> void assert_is_in_closed_subset(T *p) {
  T raw = oopDesc::load_heap_oop(p);
  if (oopDesc::is_null(raw)) return;
  oop obj = oopDesc::decode_heap_oop_not_null(raw);
  assert(Universe::heap()->is_in_closed_subset(obj), "should be in closed");
}
template <class T> void assert_is_in_reserved(T *p) {
  T raw = oopDesc::load_heap_oop(p);
  if (oopDesc::is_null(raw)) return;
  oop obj = oopDesc::decode_heap_oop_not_null(raw);
  assert(Universe::heap()->is_in_reserved(obj), "should be in reserved");
}
// No-op variant for call sites where no containment check is wanted.
template <class T> void assert_nothing(T *p) {}

#else
// Product builds: all checks compile away to empty bodies.
template <class T> void assert_is_in(T *p) {}
template <class T> void assert_is_in_closed_subset(T *p) {}
template <class T> void assert_is_in_reserved(T *p) {}
template <class T> void assert_nothing(T *p) {}
#endif // ASSERT
// Walk "count" slots of type T starting at start_p, applying the
// debug check assert_fn and then the do_oop statement to each slot.
// "p" is deliberately exposed to the do_oop expression.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE( \
  T, start_p, count, do_oop,                \
  assert_fn)                                \
{                                           \
  T* p         = (T*)(start_p);             \
  T* const end = p + (count);               \
  while (p < end) {                         \
    (assert_fn)(p);                         \
    do_oop;                                 \
    ++p;                                    \
  }                                         \
}
// As above, but the iteration is clamped to the [low, high) region.
// Both bounds must be T-aligned (checked by the assert).
#define InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE( \
  T, start_p, count, low, high,                      \
  do_oop, assert_fn)                                 \
{                                                    \
  T* const l = (T*)(low);                            \
  T* const h = (T*)(high);                           \
  assert(mask_bits((intptr_t)l, sizeof(T)-1) == 0 && \
         mask_bits((intptr_t)h, sizeof(T)-1) == 0,   \
         "bounded region must be properly aligned"); \
  T* p   = (T*)(start_p);                            \
  T* end = p + (count);                              \
  if (p < l)   p = l;                                \
  if (end > h) end = h;                              \
  while (p < end) {                                  \
    (assert_fn)(p);                                  \
    do_oop;                                          \
    ++p;                                             \
  }                                                  \
}
// Dispatch to the narrowOop or oop specialization of the unbounded
// iteration macro, depending on the UseCompressedOops flag.
#define InstanceMirrorKlass_OOP_ITERATE(start_p, count,    \
                                        do_oop, assert_fn) \
{                                                          \
  if (UseCompressedOops) {                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(narrowOop, \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  } else {                                                 \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE(oop,       \
      start_p, count,                                      \
      do_oop, assert_fn)                                   \
  }                                                        \
}
// The following macros call specialized macros, passing either oop or
// narrowOop as the specialization type. These test the UseCompressedOops
// flag.
// Bounded variant: iteration is clamped to the [low, high) region.
#define InstanceMirrorKlass_BOUNDED_OOP_ITERATE(start_p, count, low, high, \
                                                do_oop, assert_fn)         \
{                                                                  \
  if (UseCompressedOops) {                                         \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(narrowOop, \
      start_p, count,                                              \
      low, high,                                                   \
      do_oop, assert_fn)                                           \
  } else {                                                         \
    InstanceMirrorKlass_SPECIALIZED_BOUNDED_OOP_ITERATE(oop,       \
      start_p, count,                                              \
      low, high,                                                   \
      do_oop, assert_fn)                                           \
  }                                                                \
}
// MarkSweep marking: follow everything reachable from this mirror —
// the instance fields (via the superclass), the mirrored klass, and
// the static oop fields stored in the mirror itself.
void InstanceMirrorKlass::oop_follow_contents(oop obj) {
  InstanceKlass::oop_follow_contents(obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    MarkSweep::follow_klass(klass);
  } else {
    // If klass is NULL then this is a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push each static oop field held in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                       \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \
    MarkSweep::mark_and_push(p),                                         \
    assert_is_in_closed_subset)
}
#ifndef SERIALGC
// Parallel compact marking: same shape as the serial version above,
// but routed through the per-thread ParCompactionManager.
void InstanceMirrorKlass::oop_follow_contents(ParCompactionManager* cm,
                                              oop obj) {
  InstanceKlass::oop_follow_contents(cm, obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    PSParallelCompact::follow_klass(cm, klass);
  } else {
    // If klass is NULL then this is a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  // Mark and push each static oop field held in the mirror.
  InstanceMirrorKlass_OOP_ITERATE(                                       \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \
    PSParallelCompact::mark_and_push(cm, p),                             \
    assert_is_in)
}
#endif // SERIALGC
// MarkSweep pointer adjustment: updates the instance fields (via the
// superclass), the mirrored klass pointer, and each static oop field.
// Returns the object's size (captured before pointers change).
int InstanceMirrorKlass::oop_adjust_pointers(oop obj) {
  int size = oop_size(obj);
  InstanceKlass::oop_adjust_pointers(obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    MarkSweep::adjust_klass(klass);
  } else {
    // If klass is NULL then this is a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  InstanceMirrorKlass_OOP_ITERATE(                                       \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \
    MarkSweep::adjust_pointer(p),                                        \
    assert_nothing)
  return size;
}
// Function-body fragment (not a complete function) expanded inside the
// oop_oop_iterate definitions below: apply the closure to every static
// oop field of the mirror, then return the object size.
#define InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(T, nv_suffix)         \
  InstanceMirrorKlass_OOP_ITERATE(                                             \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \
    (closure)->do_oop##nv_suffix(p),                                           \
    assert_is_in_closed_subset)                                                \
  return oop_size(obj);                                                        \

// Bounded counterpart of the fragment above: apply the closure only to
// static oop fields lying within MemRegion mr, then return the object size.
#define InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(T, nv_suffix, mr)  \
  InstanceMirrorKlass_BOUNDED_OOP_ITERATE(                                     \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj), \
    mr.start(), mr.end(),                                                      \
    (closure)->do_oop##nv_suffix(p),                                           \
    assert_is_in_closed_subset)                                                \
  return oop_size(obj);                                                        \

// Guard that runs the statement/block following its expansion only when
// the closure processes metadata; also asserts that the non-virtual and
// virtual do_metadata answers agree.
#define if_do_metadata_checked(closure, nv_suffix)                    \
  /* Make sure the non-virtual and the virtual versions match. */     \
  assert(closure->do_metadata##nv_suffix() == closure->do_metadata(), \
         "Inconsistency in do_metadata");                             \
  if (closure->do_metadata##nv_suffix())
// Macro to define InstanceMirrorKlass::oop_oop_iterate for virtual/nonvirtual for
// all closures. Macros calling macros above for each oop size.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN(OopClosureType, nv_suffix)      \
                                                                                 \
int InstanceMirrorKlass::                                                        \
oop_oop_iterate##nv_suffix(oop obj, OopClosureType* closure) {                   \
  /* Get size before changing pointers */                                        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \
                                                                                 \
  /* Instance fields first, via the superclass. */                               \
  InstanceKlass::oop_oop_iterate##nv_suffix(obj, closure);                       \
                                                                                 \
  /* If the closure wants metadata, hand it the mirrored klass */                \
  /* (NULL for primitive-type mirrors). */                                       \
  if_do_metadata_checked(closure, nv_suffix) {                                   \
    Klass* klass = java_lang_Class::as_Klass(obj);                               \
    /* We'll get NULL for primitive mirrors. */                                  \
    if (klass != NULL) {                                                         \
      closure->do_klass##nv_suffix(klass);                                       \
    }                                                                            \
  }                                                                              \
                                                                                 \
  /* Finally visit the static oop fields; this returns oop_size(obj). */         \
  if (UseCompressedOops) {                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);      \
  } else {                                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);            \
  }                                                                              \
}
#ifndef SERIALGC
// Backwards variant (non-serial collectors only): delegates to
// InstanceKlass::oop_oop_iterate_backwards, and — unlike the forward
// variant — does not visit the mirrored klass metadata here.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN(OopClosureType, nv_suffix) \
                                                                                 \
int InstanceMirrorKlass::                                                        \
oop_oop_iterate_backwards##nv_suffix(oop obj, OopClosureType* closure) {         \
  /* Get size before changing pointers */                                        \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \
                                                                                 \
  InstanceKlass::oop_oop_iterate_backwards##nv_suffix(obj, closure);             \
                                                                                 \
  /* Static oop fields are still iterated in the forward direction. */           \
  if (UseCompressedOops) {                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(narrowOop, nv_suffix);      \
  } else {                                                                       \
    InstanceMirrorKlass_SPECIALIZED_OOP_ITERATE_DEFN(oop, nv_suffix);            \
  }                                                                              \
}
#endif // !SERIALGC
// Bounded (_m) variant: only oops within MemRegion mr are visited, and
// the mirrored klass is passed to the closure only if the mirror object
// itself lies inside mr.
#define InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m(OopClosureType, nv_suffix)    \
                                                                                 \
int InstanceMirrorKlass::                                                        \
oop_oop_iterate##nv_suffix##_m(oop obj,                                          \
                               OopClosureType* closure,                          \
                               MemRegion mr) {                                   \
  SpecializationStats::record_iterate_call##nv_suffix(SpecializationStats::irk); \
                                                                                 \
  InstanceKlass::oop_oop_iterate##nv_suffix##_m(obj, closure, mr);               \
                                                                                 \
  if_do_metadata_checked(closure, nv_suffix) {                                   \
    if (mr.contains(obj)) {                                                      \
      Klass* klass = java_lang_Class::as_Klass(obj);                             \
      /* We'll get NULL for primitive mirrors. */                                \
      if (klass != NULL) {                                                       \
        closure->do_klass##nv_suffix(klass);                                     \
      }                                                                          \
    }                                                                            \
  }                                                                              \
                                                                                 \
  /* Bounded static-field walk; this returns oop_size(obj). */                   \
  if (UseCompressedOops) {                                                       \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(narrowOop, nv_suffix, mr); \
  } else {                                                                       \
    InstanceMirrorKlass_BOUNDED_SPECIALIZED_OOP_ITERATE(oop, nv_suffix, mr);     \
  }                                                                              \
}
// Instantiate the oop_oop_iterate family for every specialized closure
// type; the backwards variants exist only for the non-serial collectors.
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN)
#ifndef SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_BACKWARDS_DEFN)
#endif // SERIALGC
ALL_OOP_OOP_ITERATE_CLOSURES_1(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
ALL_OOP_OOP_ITERATE_CLOSURES_2(InstanceMirrorKlass_OOP_OOP_ITERATE_DEFN_m)
#ifndef SERIALGC
// Parallel scavenge: push each scavengeable static oop field of the
// mirror onto the promotion manager's depth-first work queue.
void InstanceMirrorKlass::oop_push_contents(PSPromotionManager* pm, oop obj) {
  // Note that we don't have to follow the mirror -> klass pointer, since all
  // klasses that are dirty will be scavenged when we iterate over the
  // ClassLoaderData objects.

  InstanceKlass::oop_push_contents(pm, obj);
  InstanceMirrorKlass_OOP_ITERATE(                                           \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    if (PSScavenge::should_scavenge(p)) {                                    \
      pm->claim_or_forward_depth(p);                                         \
    },                                                                       \
    assert_nothing )
}
// Parallel compact pointer adjustment: updates the instance fields (via
// the superclass), the mirrored klass pointer, and each static oop field.
// Returns the object's size (captured before pointers change).
int InstanceMirrorKlass::oop_update_pointers(ParCompactionManager* cm, oop obj) {
  int size = oop_size(obj);
  InstanceKlass::oop_update_pointers(cm, obj);

  // Follow the klass field in the mirror.
  Klass* klass = java_lang_Class::as_Klass(obj);
  if (klass != NULL) {
    PSParallelCompact::adjust_klass(cm, klass);
  } else {
    // If klass is NULL then this is a mirror for a primitive type.
    // We don't have to follow them, since they are handled as strong
    // roots in Universe::oops_do.
    assert(java_lang_Class::is_primitive(obj), "Sanity check");
  }

  InstanceMirrorKlass_OOP_ITERATE(                                           \
    start_of_static_fields(obj), java_lang_Class::static_oop_field_count(obj),\
    PSParallelCompact::adjust_pointer(p),                                    \
    assert_nothing)
  return size;
}
#endif // SERIALGC
358 int InstanceMirrorKlass::instance_size(KlassHandle k) {
359 if (k() != NULL && k->oop_is_instance()) {
360 return align_object_size(size_helper() + InstanceKlass::cast(k())->static_field_size());
361 }
362 return size_helper();
363 }
// Allocate a new mirror (java.lang.Class instance) for klass k.
// On a pending exception, CHECK_NULL makes this return NULL to the caller.
instanceOop InstanceMirrorKlass::allocate_instance(KlassHandle k, TRAPS) {
  // Query before forming handle.
  int size = instance_size(k);
  KlassHandle h_k(THREAD, this);
  instanceOop i = (instanceOop) CollectedHeap::Class_obj_allocate(h_k, size, k, CHECK_NULL);
  return i;
}
// Mirrors have per-instance sizes (the static-field block varies by
// mirrored class), so the size is read from the java.lang.Class object
// itself rather than computed from this klass.
int InstanceMirrorKlass::oop_size(oop obj) const {
  return java_lang_Class::oop_size(obj);
}
377 int InstanceMirrorKlass::compute_static_oop_field_count(oop obj) {
378 Klass* k = java_lang_Class::as_Klass(obj);
379 if (k != NULL && k->oop_is_instance()) {
380 return InstanceKlass::cast(k)->static_oop_field_count();
381 }
382 return 0;
383 }