Fri, 25 Jan 2013 10:04:08 -0500
8000692: Remove old KERNEL code
Summary: Removed deprecated kernel VM source code from the HotSpot VM
Reviewed-by: dholmes, acorn
1 /*
2 * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #include "precompiled.hpp"
26 #include "classfile/metadataOnStackMark.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/verifier.hpp"
29 #include "code/codeCache.hpp"
30 #include "compiler/compileBroker.hpp"
31 #include "interpreter/oopMapCache.hpp"
32 #include "interpreter/rewriter.hpp"
33 #include "memory/gcLocker.hpp"
34 #include "memory/metadataFactory.hpp"
35 #include "memory/metaspaceShared.hpp"
36 #include "memory/universe.inline.hpp"
37 #include "oops/fieldStreams.hpp"
38 #include "oops/klassVtable.hpp"
39 #include "prims/jvmtiImpl.hpp"
40 #include "prims/jvmtiRedefineClasses.hpp"
41 #include "prims/methodComparator.hpp"
42 #include "runtime/deoptimization.hpp"
43 #include "runtime/relocator.hpp"
44 #include "utilities/bitMap.inline.hpp"
// Scratch state shared across the phases (prologue/doit/epilogue) of a
// RedefineClasses operation: the old/new method arrays being compared
// and the classification of methods into matching/deleted/added sets.
// NOTE(review): these are statics, which presumes at most one
// RedefineClasses operation is in flight at a time -- confirm against
// the VM-operation serialization rules.
Array<Method*>* VM_RedefineClasses::_old_methods = NULL;
Array<Method*>* VM_RedefineClasses::_new_methods = NULL;
Method** VM_RedefineClasses::_matching_old_methods = NULL;
Method** VM_RedefineClasses::_matching_new_methods = NULL;
Method** VM_RedefineClasses::_deleted_methods      = NULL;
Method** VM_RedefineClasses::_added_methods        = NULL;
int         VM_RedefineClasses::_matching_methods_length = 0;
int         VM_RedefineClasses::_deleted_methods_length  = 0;
int         VM_RedefineClasses::_added_methods_length    = 0;
// The class currently being redefined (valid during redefine_single_class).
Klass*      VM_RedefineClasses::_the_class_oop = NULL;
59 VM_RedefineClasses::VM_RedefineClasses(jint class_count,
60 const jvmtiClassDefinition *class_defs,
61 JvmtiClassLoadKind class_load_kind) {
62 _class_count = class_count;
63 _class_defs = class_defs;
64 _class_load_kind = class_load_kind;
65 _res = JVMTI_ERROR_NONE;
66 }
68 bool VM_RedefineClasses::doit_prologue() {
69 if (_class_count == 0) {
70 _res = JVMTI_ERROR_NONE;
71 return false;
72 }
73 if (_class_defs == NULL) {
74 _res = JVMTI_ERROR_NULL_POINTER;
75 return false;
76 }
77 for (int i = 0; i < _class_count; i++) {
78 if (_class_defs[i].klass == NULL) {
79 _res = JVMTI_ERROR_INVALID_CLASS;
80 return false;
81 }
82 if (_class_defs[i].class_byte_count == 0) {
83 _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
84 return false;
85 }
86 if (_class_defs[i].class_bytes == NULL) {
87 _res = JVMTI_ERROR_NULL_POINTER;
88 return false;
89 }
90 }
92 // Start timer after all the sanity checks; not quite accurate, but
93 // better than adding a bunch of stop() calls.
94 RC_TIMER_START(_timer_vm_op_prologue);
96 // We first load new class versions in the prologue, because somewhere down the
97 // call chain it is required that the current thread is a Java thread.
98 _res = load_new_class_versions(Thread::current());
99 if (_res != JVMTI_ERROR_NONE) {
100 // free any successfully created classes, since none are redefined
101 for (int i = 0; i < _class_count; i++) {
102 if (_scratch_classes[i] != NULL) {
103 ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
104 // Free the memory for this class at class unloading time. Not before
105 // because CMS might think this is still live.
106 cld->add_to_deallocate_list((InstanceKlass*)_scratch_classes[i]);
107 }
108 }
109 // Free os::malloc allocated memory in load_new_class_version.
110 os::free(_scratch_classes);
111 RC_TIMER_STOP(_timer_vm_op_prologue);
112 return false;
113 }
115 RC_TIMER_STOP(_timer_vm_op_prologue);
116 return true;
117 }
// The safepoint phase of the redefinition: installs the scratch class
// versions that doit_prologue() loaded. Statement order here matters --
// the shared-space remap, the on-stack metadata marking and the handle
// mark must all precede the per-class redefine loop.
void VM_RedefineClasses::doit() {
  Thread *thread = Thread::current();

  if (UseSharedSpaces) {
    // Sharing is enabled so we remap the shared readonly space to
    // shared readwrite, private just in case we need to redefine
    // a shared class. We do the remap during the doit() phase of
    // the safepoint to be safer.
    if (!MetaspaceShared::remap_shared_readonly_as_readwrite()) {
      RC_TRACE_WITH_THREAD(0x00000001, thread,
        ("failed to remap shared readonly space to readwrite, private"));
      _res = JVMTI_ERROR_INTERNAL;
      return;
    }
  }

  // Mark methods seen on stack and everywhere else so old methods are not
  // cleaned up if they're on the stack.
  MetadataOnStackMark md_on_stack;
  HandleMark hm(thread);   // make sure any handles created are deleted
                           // before the stack walk again.

  for (int i = 0; i < _class_count; i++) {
    redefine_single_class(_class_defs[i].klass, _scratch_classes[i], thread);
    ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
    // Free the memory for this class at class unloading time. Not before
    // because CMS might think this is still live.
    cld->add_to_deallocate_list((InstanceKlass*)_scratch_classes[i]);
    // Clear the slot so the error/cleanup paths never see a stale pointer.
    _scratch_classes[i] = NULL;
  }

  // Disable any dependent concurrent compilations
  SystemDictionary::notice_modification();

  // Set flag indicating that some invariants are no longer true.
  // See jvmtiExport.hpp for detailed explanation.
  JvmtiExport::set_has_redefined_a_class();

#ifdef ASSERT
  // Debug-only sweep over all classes to verify invariants still hold.
  SystemDictionary::classes_do(check_class, thread);
#endif
}
162 void VM_RedefineClasses::doit_epilogue() {
163 // Free os::malloc allocated memory.
164 os::free(_scratch_classes);
166 if (RC_TRACE_ENABLED(0x00000004)) {
167 // Used to have separate timers for "doit" and "all", but the timer
168 // overhead skewed the measurements.
169 jlong doit_time = _timer_rsc_phase1.milliseconds() +
170 _timer_rsc_phase2.milliseconds();
171 jlong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;
173 RC_TRACE(0x00000004, ("vm_op: all=" UINT64_FORMAT
174 " prologue=" UINT64_FORMAT " doit=" UINT64_FORMAT, all_time,
175 _timer_vm_op_prologue.milliseconds(), doit_time));
176 RC_TRACE(0x00000004,
177 ("redefine_single_class: phase1=" UINT64_FORMAT " phase2=" UINT64_FORMAT,
178 _timer_rsc_phase1.milliseconds(), _timer_rsc_phase2.milliseconds()));
179 }
180 }
182 bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
183 // classes for primitives cannot be redefined
184 if (java_lang_Class::is_primitive(klass_mirror)) {
185 return false;
186 }
187 Klass* the_class_oop = java_lang_Class::as_Klass(klass_mirror);
188 // classes for arrays cannot be redefined
189 if (the_class_oop == NULL || !the_class_oop->oop_is_instance()) {
190 return false;
191 }
192 return true;
193 }
// Append the current entry at scratch_i in scratch_cp to *merge_cp_p
// where the end of *merge_cp_p is specified by *merge_cp_length_p. For
// direct CP entries, there is just the current entry to append. For
// indirect and double-indirect CP entries, there are zero or more
// referenced CP entries along with the current entry to append.
// Indirect and double-indirect CP entries are handled by recursive
// calls to append_entry() as needed. The referenced CP entries are
// always appended to *merge_cp_p before the referee CP entry. These
// referenced CP entries may already exist in *merge_cp_p in which case
// there is nothing extra to append and only the current entry is
// appended.
void VM_RedefineClasses::append_entry(constantPoolHandle scratch_cp,
       int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p,
       TRAPS) {

  // append is different depending on entry tag type
  switch (scratch_cp->tag_at(scratch_i).value()) {

    // The old verifier is implemented outside the VM. It loads classes,
    // but does not resolve constant pool entries directly so we never
    // see Class entries here with the old verifier. Similarly the old
    // verifier does not like Class entries in the input constant pool.
    // The split-verifier is implemented in the VM so it can optionally
    // and directly resolve constant pool entries to load classes. The
    // split-verifier can accept either Class entries or UnresolvedClass
    // entries in the input constant pool. We revert the appended copy
    // back to UnresolvedClass so that either verifier will be happy
    // with the constant pool entry.
    case JVM_CONSTANT_Class:
    {
      // revert the copy to JVM_CONSTANT_UnresolvedClass
      (*merge_cp_p)->unresolved_klass_at_put(*merge_cp_length_p,
        scratch_cp->klass_name_at(scratch_i));

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // these are direct CP entries so they can be directly appended,
    // but double and long take two constant pool entries
    case JVM_CONSTANT_Double:  // fall through
    case JVM_CONSTANT_Long:
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      // Double and Long occupy two constant pool slots.
      (*merge_cp_length_p) += 2;
    } break;

    // these are direct CP entries so they can be directly appended
    case JVM_CONSTANT_Float:   // fall through
    case JVM_CONSTANT_Integer: // fall through
    case JVM_CONSTANT_Utf8:    // fall through

    // This was an indirect CP entry, but it has been changed into
    // Symbol*s so this entry can be directly appended.
    case JVM_CONSTANT_String:  // fall through

    // These were indirect CP entries, but they have been changed into
    // Symbol*s so these entries can be directly appended.
    case JVM_CONSTANT_UnresolvedClass:  // fall through
    {
      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_NameAndType:
    {
      // First ensure the name entry this NameAndType references is
      // present in *merge_cp_p, appending it if necessary.
      int name_ref_i = scratch_cp->name_ref_index_at(scratch_i);
      int new_name_ref_i = 0;
      bool match = (name_ref_i < *merge_cp_length_p) &&
        scratch_cp->compare_entry_to(name_ref_i, *merge_cp_p, name_ref_i,
          THREAD);
      if (!match) {
        // forward reference in *merge_cp_p or not a direct match

        int found_i = scratch_cp->find_matching_entry(name_ref_i, *merge_cp_p,
          THREAD);
        if (found_i != 0) {
          guarantee(found_i != name_ref_i,
            "compare_entry_to() and find_matching_entry() do not agree");

          // Found a matching entry somewhere else in *merge_cp_p so
          // just need a mapping entry.
          new_name_ref_i = found_i;
          map_index(scratch_cp, name_ref_i, found_i);
        } else {
          // no match found so we have to append this entry to *merge_cp_p
          append_entry(scratch_cp, name_ref_i, merge_cp_p, merge_cp_length_p,
            THREAD);
          // The above call to append_entry() can only append one entry
          // so the post call query of *merge_cp_length_p is only for
          // the sake of consistency.
          new_name_ref_i = *merge_cp_length_p - 1;
        }
      }

      // Then do the same for the signature entry.
      int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i);
      int new_signature_ref_i = 0;
      match = (signature_ref_i < *merge_cp_length_p) &&
        scratch_cp->compare_entry_to(signature_ref_i, *merge_cp_p,
          signature_ref_i, THREAD);
      if (!match) {
        // forward reference in *merge_cp_p or not a direct match

        int found_i = scratch_cp->find_matching_entry(signature_ref_i,
          *merge_cp_p, THREAD);
        if (found_i != 0) {
          guarantee(found_i != signature_ref_i,
            "compare_entry_to() and find_matching_entry() do not agree");

          // Found a matching entry somewhere else in *merge_cp_p so
          // just need a mapping entry.
          new_signature_ref_i = found_i;
          map_index(scratch_cp, signature_ref_i, found_i);
        } else {
          // no match found so we have to append this entry to *merge_cp_p
          append_entry(scratch_cp, signature_ref_i, merge_cp_p,
            merge_cp_length_p, THREAD);
          // The above call to append_entry() can only append one entry
          // so the post call query of *merge_cp_length_p is only for
          // the sake of consistency.
          new_signature_ref_i = *merge_cp_length_p - 1;
        }
      }

      // If the referenced entries already exist in *merge_cp_p, then
      // both new_name_ref_i and new_signature_ref_i will both be 0.
      // In that case, all we are appending is the current entry.
      if (new_name_ref_i == 0) {
        new_name_ref_i = name_ref_i;
      } else {
        RC_TRACE(0x00080000,
          ("NameAndType entry@%d name_ref_index change: %d to %d",
          *merge_cp_length_p, name_ref_i, new_name_ref_i));
      }
      if (new_signature_ref_i == 0) {
        new_signature_ref_i = signature_ref_i;
      } else {
        RC_TRACE(0x00080000,
          ("NameAndType entry@%d signature_ref_index change: %d to %d",
          *merge_cp_length_p, signature_ref_i, new_signature_ref_i));
      }

      (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
        new_name_ref_i, new_signature_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is a double-indirect CP entry so it needs special handling
    case JVM_CONSTANT_Fieldref:           // fall through
    case JVM_CONSTANT_InterfaceMethodref: // fall through
    case JVM_CONSTANT_Methodref:
    {
      // First ensure the klass entry this member ref references is
      // present in *merge_cp_p, appending it if necessary.
      int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i);
      int new_klass_ref_i = 0;
      bool match = (klass_ref_i < *merge_cp_length_p) &&
        scratch_cp->compare_entry_to(klass_ref_i, *merge_cp_p, klass_ref_i,
          THREAD);
      if (!match) {
        // forward reference in *merge_cp_p or not a direct match

        int found_i = scratch_cp->find_matching_entry(klass_ref_i, *merge_cp_p,
          THREAD);
        if (found_i != 0) {
          guarantee(found_i != klass_ref_i,
            "compare_entry_to() and find_matching_entry() do not agree");

          // Found a matching entry somewhere else in *merge_cp_p so
          // just need a mapping entry.
          new_klass_ref_i = found_i;
          map_index(scratch_cp, klass_ref_i, found_i);
        } else {
          // no match found so we have to append this entry to *merge_cp_p
          append_entry(scratch_cp, klass_ref_i, merge_cp_p, merge_cp_length_p,
            THREAD);
          // The above call to append_entry() can only append one entry
          // so the post call query of *merge_cp_length_p is only for
          // the sake of consistency. Without the optimization where we
          // use JVM_CONSTANT_UnresolvedClass, then up to two entries
          // could be appended.
          new_klass_ref_i = *merge_cp_length_p - 1;
        }
      }

      // Then ensure the NameAndType entry is present; appending it may
      // recursively append its name and signature entries as well.
      int name_and_type_ref_i =
        scratch_cp->uncached_name_and_type_ref_index_at(scratch_i);
      int new_name_and_type_ref_i = 0;
      match = (name_and_type_ref_i < *merge_cp_length_p) &&
        scratch_cp->compare_entry_to(name_and_type_ref_i, *merge_cp_p,
          name_and_type_ref_i, THREAD);
      if (!match) {
        // forward reference in *merge_cp_p or not a direct match

        int found_i = scratch_cp->find_matching_entry(name_and_type_ref_i,
          *merge_cp_p, THREAD);
        if (found_i != 0) {
          guarantee(found_i != name_and_type_ref_i,
            "compare_entry_to() and find_matching_entry() do not agree");

          // Found a matching entry somewhere else in *merge_cp_p so
          // just need a mapping entry.
          new_name_and_type_ref_i = found_i;
          map_index(scratch_cp, name_and_type_ref_i, found_i);
        } else {
          // no match found so we have to append this entry to *merge_cp_p
          append_entry(scratch_cp, name_and_type_ref_i, merge_cp_p,
            merge_cp_length_p, THREAD);
          // The above call to append_entry() can append more than
          // one entry so the post call query of *merge_cp_length_p
          // is required in order to get the right index for the
          // JVM_CONSTANT_NameAndType entry.
          new_name_and_type_ref_i = *merge_cp_length_p - 1;
        }
      }

      // If the referenced entries already exist in *merge_cp_p, then
      // both new_klass_ref_i and new_name_and_type_ref_i will both be
      // 0. In that case, all we are appending is the current entry.
      if (new_klass_ref_i == 0) {
        new_klass_ref_i = klass_ref_i;
      }
      if (new_name_and_type_ref_i == 0) {
        new_name_and_type_ref_i = name_and_type_ref_i;
      }

      // Append the member ref itself, using the put routine that matches
      // its tag; entry_name is only used for tracing below.
      const char *entry_name;
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Fieldref:
        entry_name = "Fieldref";
        (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_InterfaceMethodref:
        entry_name = "IFMethodref";
        (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p,
          new_klass_ref_i, new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_Methodref:
        entry_name = "Methodref";
        (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      default:
        guarantee(false, "bad switch");
        break;
      }

      if (klass_ref_i != new_klass_ref_i) {
        RC_TRACE(0x00080000, ("%s entry@%d class_index changed: %d to %d",
          entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i));
      }
      if (name_and_type_ref_i != new_name_and_type_ref_i) {
        RC_TRACE(0x00080000,
          ("%s entry@%d name_and_type_index changed: %d to %d",
          entry_name, *merge_cp_length_p, name_and_type_ref_i,
          new_name_and_type_ref_i));
      }

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // At this stage, Class or UnresolvedClass could be here, but not
    // ClassIndex
    case JVM_CONSTANT_ClassIndex: // fall through

    // Invalid is used as the tag for the second constant pool entry
    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
    // not be seen by itself.
    case JVM_CONSTANT_Invalid: // fall through

    // At this stage, String could be here, but not StringIndex
    case JVM_CONSTANT_StringIndex: // fall through

    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be
    // here
    case JVM_CONSTANT_UnresolvedClassInError: // fall through

    default:
    {
      // leave a breadcrumb
      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
      ShouldNotReachHere();
    } break;
  } // end switch tag value
} // end append_entry()
510 void VM_RedefineClasses::swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class, TRAPS) {
511 AnnotationArray* save;
513 Annotations* sca = scratch_class->annotations();
514 if (sca == NULL) return;
516 save = sca->get_method_annotations_of(i);
517 sca->set_method_annotations_of(scratch_class, i, sca->get_method_annotations_of(j), CHECK);
518 sca->set_method_annotations_of(scratch_class, j, save, CHECK);
520 save = sca->get_method_parameter_annotations_of(i);
521 sca->set_method_parameter_annotations_of(scratch_class, i, sca->get_method_parameter_annotations_of(j), CHECK);
522 sca->set_method_parameter_annotations_of(scratch_class, j, save, CHECK);
524 save = sca->get_method_default_annotations_of(i);
525 sca->set_method_default_annotations_of(scratch_class, i, sca->get_method_default_annotations_of(j), CHECK);
526 sca->set_method_default_annotations_of(scratch_class, j, save, CHECK);
527 }
// Compare the old class version (the_class) against the proposed new
// version (scratch_class) and verify that only changes supported by
// RedefineClasses were made: same hierarchy, same modifiers, same field
// schema, and only private static/final methods added or deleted. As a
// side effect, normalizes the order of overloaded methods in
// scratch_class to match the old order and re-assigns method idnums so
// a matching method keeps its old idnum. Returns JVMTI_ERROR_NONE when
// the redefinition is acceptable, otherwise the specific
// JVMTI_ERROR_UNSUPPORTED_REDEFINITION_* code.
jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
             instanceKlassHandle the_class,
             instanceKlassHandle scratch_class) {
  int i;

  // Check superclasses, or rather their names, since superclasses themselves can be
  // requested to replace.
  // Check for NULL superclass first since this might be java.lang.Object
  if (the_class->super() != scratch_class->super() &&
      (the_class->super() == NULL || scratch_class->super() == NULL ||
       the_class->super()->name() !=
       scratch_class->super()->name())) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
  }

  // Check if the number, names and order of directly implemented interfaces are the same.
  // I think in principle we should just check if the sets of names of directly implemented
  // interfaces are the same, i.e. the order of declaration (which, however, if changed in the
  // .java file, also changes in .class file) should not matter. However, comparing sets is
  // technically a bit more difficult, and, more importantly, I am not sure at present that the
  // order of interfaces does not matter on the implementation level, i.e. that the VM does not
  // rely on it somewhere.
  Array<Klass*>* k_interfaces = the_class->local_interfaces();
  Array<Klass*>* k_new_interfaces = scratch_class->local_interfaces();
  int n_intfs = k_interfaces->length();
  if (n_intfs != k_new_interfaces->length()) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
  }
  for (i = 0; i < n_intfs; i++) {
    if (k_interfaces->at(i)->name() !=
        k_new_interfaces->at(i)->name()) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
    }
  }

  // Check whether class is in the error init state.
  if (the_class->is_in_error_state()) {
    // TBD #5057930: special error code is needed in 1.6
    return JVMTI_ERROR_INVALID_CLASS;
  }

  // Check whether class modifiers are the same.
  jushort old_flags = (jushort) the_class->access_flags().get_flags();
  jushort new_flags = (jushort) scratch_class->access_flags().get_flags();
  if (old_flags != new_flags) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_MODIFIERS_CHANGED;
  }

  // Check if the number, names, types and order of fields declared in these classes
  // are the same.
  JavaFieldStream old_fs(the_class);
  JavaFieldStream new_fs(scratch_class);
  for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
    // access
    old_flags = old_fs.access_flags().as_short();
    new_flags = new_fs.access_flags().as_short();
    if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
    // offset
    if (old_fs.offset() != new_fs.offset()) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
    // name and signature
    Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index());
    Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index());
    Symbol* name_sym2 = scratch_class->constants()->symbol_at(new_fs.name_index());
    Symbol* sig_sym2 = scratch_class->constants()->symbol_at(new_fs.signature_index());
    if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
    }
  }

  // If both streams aren't done then we have a differing number of
  // fields.
  if (!old_fs.done() || !new_fs.done()) {
    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
  }

  // Do a parallel walk through the old and new methods. Detect
  // cases where they match (exist in both), have been added in
  // the new methods, or have been deleted (exist only in the
  // old methods). The class file parser places methods in order
  // by method name, but does not order overloaded methods by
  // signature. In order to determine what fate befell the methods,
  // this code places the overloaded new methods that have matching
  // old methods in the same order as the old methods and places
  // new overloaded methods at the end of overloaded methods of
  // that name. The code for this order normalization is adapted
  // from the algorithm used in InstanceKlass::find_method().
  // Since we are swapping out of order entries as we find them,
  // we only have to search forward through the overloaded methods.
  // Methods which are added and have the same name as an existing
  // method (but different signature) will be put at the end of
  // the methods with that name, and the name mismatch code will
  // handle them.
  Array<Method*>* k_old_methods(the_class->methods());
  Array<Method*>* k_new_methods(scratch_class->methods());
  int n_old_methods = k_old_methods->length();
  int n_new_methods = k_new_methods->length();
  Thread* thread = Thread::current();

  int ni = 0;
  int oi = 0;
  while (true) {
    Method* k_old_method;
    Method* k_new_method;
    enum { matched, added, deleted, undetermined } method_was = undetermined;

    if (oi >= n_old_methods) {
      if (ni >= n_new_methods) {
        break; // we've looked at everything, done
      }
      // New method at the end
      k_new_method = k_new_methods->at(ni);
      method_was = added;
    } else if (ni >= n_new_methods) {
      // Old method, at the end, is deleted
      k_old_method = k_old_methods->at(oi);
      method_was = deleted;
    } else {
      // There are more methods in both the old and new lists
      k_old_method = k_old_methods->at(oi);
      k_new_method = k_new_methods->at(ni);
      if (k_old_method->name() != k_new_method->name()) {
        // Methods are sorted by method name, so a mismatch means added
        // or deleted
        if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
          method_was = added;
        } else {
          method_was = deleted;
        }
      } else if (k_old_method->signature() == k_new_method->signature()) {
        // Both the name and signature match
        method_was = matched;
      } else {
        // The name matches, but the signature doesn't, which means we have to
        // search forward through the new overloaded methods.
        int nj;  // outside the loop for post-loop check
        for (nj = ni + 1; nj < n_new_methods; nj++) {
          Method* m = k_new_methods->at(nj);
          if (k_old_method->name() != m->name()) {
            // reached another method name so no more overloaded methods
            method_was = deleted;
            break;
          }
          if (k_old_method->signature() == m->signature()) {
            // found a match so swap the methods
            k_new_methods->at_put(ni, m);
            k_new_methods->at_put(nj, k_new_method);
            k_new_method = m;
            method_was = matched;
            break;
          }
        }

        if (nj >= n_new_methods) {
          // reached the end without a match; so method was deleted
          method_was = deleted;
        }
      }
    }

    switch (method_was) {
    case matched:
      // methods match, be sure modifiers do too
      old_flags = (jushort) k_old_method->access_flags().get_flags();
      new_flags = (jushort) k_new_method->access_flags().get_flags();
      if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
      }
      {
        // Keep the matched method on its old idnum; swap with whichever
        // method currently owns that idnum, annotations included.
        u2 new_num = k_new_method->method_idnum();
        u2 old_num = k_old_method->method_idnum();
        if (new_num != old_num) {
          Method* idnum_owner = scratch_class->method_with_idnum(old_num);
          if (idnum_owner != NULL) {
            // There is already a method assigned this idnum -- switch them
            idnum_owner->set_method_idnum(new_num);
          }
          k_new_method->set_method_idnum(old_num);
          swap_all_method_annotations(old_num, new_num, scratch_class, thread);
          if (thread->has_pending_exception()) {
            return JVMTI_ERROR_OUT_OF_MEMORY;
          }
        }
      }
      RC_TRACE(0x00008000, ("Method matched: new: %s [%d] == old: %s [%d]",
                            k_new_method->name_and_sig_as_C_string(), ni,
                            k_old_method->name_and_sig_as_C_string(), oi));
      // advance to next pair of methods
      ++oi;
      ++ni;
      break;
    case added:
      // method added, see if it is OK
      new_flags = (jushort) k_new_method->access_flags().get_flags();
      if ((new_flags & JVM_ACC_PRIVATE) == 0
           // hack: private should be treated as final, but alas
          || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
         ) {
        // new methods must be private
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
      }
      {
        // Give the added method a fresh idnum from the old class.
        u2 num = the_class->next_method_idnum();
        if (num == ConstMethod::UNSET_IDNUM) {
          // cannot add any more methods
          return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
        }
        u2 new_num = k_new_method->method_idnum();
        Method* idnum_owner = scratch_class->method_with_idnum(num);
        if (idnum_owner != NULL) {
          // There is already a method assigned this idnum -- switch them
          idnum_owner->set_method_idnum(new_num);
        }
        k_new_method->set_method_idnum(num);
        swap_all_method_annotations(new_num, num, scratch_class, thread);
        if (thread->has_pending_exception()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        }
      }
      RC_TRACE(0x00008000, ("Method added: new: %s [%d]",
                            k_new_method->name_and_sig_as_C_string(), ni));
      ++ni; // advance to next new method
      break;
    case deleted:
      // method deleted, see if it is OK
      old_flags = (jushort) k_old_method->access_flags().get_flags();
      if ((old_flags & JVM_ACC_PRIVATE) == 0
           // hack: private should be treated as final, but alas
          || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
         ) {
        // deleted methods must be private
        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
      }
      RC_TRACE(0x00008000, ("Method deleted: old: %s [%d]",
                            k_old_method->name_and_sig_as_C_string(), oi));
      ++oi; // advance to next old method
      break;
    default:
      ShouldNotReachHere();
    }
  }

  return JVMTI_ERROR_NONE;
}
779 // Find new constant pool index value for old constant pool index value
780 // by seaching the index map. Returns zero (0) if there is no mapped
781 // value for the old constant pool index.
782 int VM_RedefineClasses::find_new_index(int old_index) {
783 if (_index_map_count == 0) {
784 // map is empty so nothing can be found
785 return 0;
786 }
788 if (old_index < 1 || old_index >= _index_map_p->length()) {
789 // The old_index is out of range so it is not mapped. This should
790 // not happen in regular constant pool merging use, but it can
791 // happen if a corrupt annotation is processed.
792 return 0;
793 }
795 int value = _index_map_p->at(old_index);
796 if (value == -1) {
797 // the old_index is not mapped
798 return 0;
799 }
801 return value;
802 } // end find_new_index()
805 // Returns true if the current mismatch is due to a resolved/unresolved
806 // class pair. Otherwise, returns false.
807 bool VM_RedefineClasses::is_unresolved_class_mismatch(constantPoolHandle cp1,
808 int index1, constantPoolHandle cp2, int index2) {
810 jbyte t1 = cp1->tag_at(index1).value();
811 if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) {
812 return false; // wrong entry type; not our special case
813 }
815 jbyte t2 = cp2->tag_at(index2).value();
816 if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) {
817 return false; // wrong entry type; not our special case
818 }
820 if (t1 == t2) {
821 return false; // not a mismatch; not our special case
822 }
824 char *s1 = cp1->klass_name_at(index1)->as_C_string();
825 char *s2 = cp2->klass_name_at(index2)->as_C_string();
826 if (strcmp(s1, s2) != 0) {
827 return false; // strings don't match; not our special case
828 }
830 return true; // made it through the gauntlet; this is our special case
831 } // end is_unresolved_class_mismatch()
// Parse, verify, and link the scratch (new) class version for every
// class being redefined. On success each parsed scratch class is
// stored in _scratch_classes[i]; on any failure a JVMTI error code is
// returned and the caller is responsible for deallocating whatever
// scratch classes were already created.
jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {

  // For consistency allocate memory using os::malloc wrapper.
  _scratch_classes = (Klass**)
    os::malloc(sizeof(Klass*) * _class_count, mtClass);
  if (_scratch_classes == NULL) {
    return JVMTI_ERROR_OUT_OF_MEMORY;
  }
  // Zero initialize the _scratch_classes array.
  for (int i = 0; i < _class_count; i++) {
    _scratch_classes[i] = NULL;
  }

  ResourceMark rm(THREAD);

  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  // state can only be NULL if the current thread is exiting which
  // should not happen since we're trying to do a RedefineClasses
  guarantee(state != NULL, "exiting thread calling load_new_class_versions");
  for (int i = 0; i < _class_count; i++) {
    // Create HandleMark so that any handles created while loading new class
    // versions are deleted. Constant pools are deallocated while merging
    // constant pools
    HandleMark hm(THREAD);

    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
    // classes for primitives cannot be redefined
    if (!is_modifiable_class(mirror)) {
      return JVMTI_ERROR_UNMODIFIABLE_CLASS;
    }
    Klass* the_class_oop = java_lang_Class::as_Klass(mirror);
    instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);
    Symbol* the_class_sym = the_class->name();

    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
      ("loading name=%s kind=%d (avail_mem=" UINT64_FORMAT "K)",
      the_class->external_name(), _class_load_kind,
      os::available_memory() >> 10));

    // Wrap the caller-supplied class bytes in a stream for the parser.
    ClassFileStream st((u1*) _class_defs[i].class_bytes,
      _class_defs[i].class_byte_count, (char *)"__VM_RedefineClasses__");

    // Parse the stream.
    Handle the_class_loader(THREAD, the_class->class_loader());
    Handle protection_domain(THREAD, the_class->protection_domain());
    // Set redefined class handle in JvmtiThreadState class.
    // This redefined class is sent to agent event handler for class file
    // load hook event.
    state->set_class_being_redefined(&the_class, _class_load_kind);

    Klass* k = SystemDictionary::parse_stream(the_class_sym,
                                              the_class_loader,
                                              protection_domain,
                                              &st,
                                              THREAD);
    // Clear class_being_redefined just to be sure.
    state->clear_class_being_redefined();

    // TODO: if this is retransform, and nothing changed we can skip it

    instanceKlassHandle scratch_class (THREAD, k);

    // Need to clean up allocated InstanceKlass if there's an error so assign
    // the result here. Caller deallocates all the scratch classes in case of
    // an error.
    _scratch_classes[i] = k;

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("parse_stream exception: '%s'",
        ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;

      // Map the parse exception to the closest JVMTI error code.
      if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
        return JVMTI_ERROR_UNSUPPORTED_VERSION;
      } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
        return JVMTI_ERROR_INVALID_CLASS_FORMAT;
      } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
        return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
      } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
        // The message will be "XXX (wrong name: YYY)"
        return JVMTI_ERROR_NAMES_DONT_MATCH;
      } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {  // Just in case more exceptions can be thrown..
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Ensure class is linked before redefine
    if (!the_class->is_linked()) {
      the_class->link_class(THREAD);
      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
        RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("link_class exception: '%s'",
          ex_name->as_C_string()));
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Do the validity checks in compare_and_normalize_class_versions()
    // before verifying the byte codes. By doing these checks first, we
    // limit the number of functions that require redirection from
    // the_class to scratch_class. In particular, we don't have to
    // modify JNI GetSuperclass() and thus won't change its performance.
    jvmtiError res = compare_and_normalize_class_versions(the_class,
                       scratch_class);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    // verify what the caller passed us
    {
      // The bug 6214132 caused the verification to fail.
      // Information about the_class and scratch_class is temporarily
      // recorded into jvmtiThreadState. This data is used to redirect
      // the_class to scratch_class in the JVM_* functions called by the
      // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
      // description.
      RedefineVerifyMark rvm(&the_class, &scratch_class, state);
      Verifier::verify(
        scratch_class, Verifier::ThrowException, true, THREAD);
    }

    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
        ("verify_byte_codes exception: '%s'", ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        // tell the caller the bytecodes are bad
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    if (VerifyMergedCPBytecodes) {
      // verify what we have done during constant pool merging
      {
        RedefineVerifyMark rvm(&the_class, &scratch_class, state);
        Verifier::verify(scratch_class, Verifier::ThrowException, true, THREAD);
      }

      if (HAS_PENDING_EXCEPTION) {
        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
        RC_TRACE_WITH_THREAD(0x00000002, THREAD,
          ("verify_byte_codes post merge-CP exception: '%s'",
          ex_name->as_C_string()));
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          // tell the caller that constant pool merging screwed up
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Rewrite the scratch class's bytecodes and link its methods; both
    // steps can raise a pending exception (handled below).
    Rewriter::rewrite(scratch_class, THREAD);
    if (!HAS_PENDING_EXCEPTION) {
      scratch_class->link_methods(THREAD);
    }
    if (HAS_PENDING_EXCEPTION) {
      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
      ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)",
      the_class->external_name(), os::available_memory() >> 10));
  }

  return JVMTI_ERROR_NONE;
}
1032 // Map old_index to new_index as needed. scratch_cp is only needed
1033 // for RC_TRACE() calls.
1034 void VM_RedefineClasses::map_index(constantPoolHandle scratch_cp,
1035 int old_index, int new_index) {
1036 if (find_new_index(old_index) != 0) {
1037 // old_index is already mapped
1038 return;
1039 }
1041 if (old_index == new_index) {
1042 // no mapping is needed
1043 return;
1044 }
1046 _index_map_p->at_put(old_index, new_index);
1047 _index_map_count++;
1049 RC_TRACE(0x00040000, ("mapped tag %d at index %d to %d",
1050 scratch_cp->tag_at(old_index).value(), old_index, new_index));
1051 } // end map_index()
// Merge old_cp and scratch_cp and return the results of the merge via
// merge_cp_p. The number of entries in *merge_cp_p is returned via
// merge_cp_length_p. The entries in old_cp occupy the same locations
// in *merge_cp_p. Also creates a map of indices from entries in
// scratch_cp to the corresponding entry in *merge_cp_p. Index map
// entries are only created for entries in scratch_cp that occupy a
// different location in *merged_cp_p.
//
// Returns false on a failed robustness check. The CHECK_0 macros
// below return 0 (false) to the caller if a pending exception is
// raised while copying or comparing entries.
bool VM_RedefineClasses::merge_constant_pools(constantPoolHandle old_cp,
       constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p,
       int *merge_cp_length_p, TRAPS) {

  if (merge_cp_p == NULL) {
    assert(false, "caller must provide scratch constantPool");
    return false; // robustness
  }
  if (merge_cp_length_p == NULL) {
    assert(false, "caller must provide scratch CP length");
    return false; // robustness
  }
  // Worst case we need old_cp->length() + scratch_cp()->length(),
  // but the caller might be smart so make sure we have at least
  // the minimum.
  if ((*merge_cp_p)->length() < old_cp->length()) {
    assert(false, "merge area too small");
    return false; // robustness
  }

  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
    ("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(),
    scratch_cp->length()));

  {
    // Pass 0:
    // The old_cp is copied to *merge_cp_p; this means that any code
    // using old_cp does not have to change. This work looks like a
    // perfect fit for ConstantPool*::copy_cp_to(), but we need to
    // handle one special case:
    // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
    // This will make verification happy.

    int old_i;  // index into old_cp

    // index zero (0) is not used in constantPools
    for (old_i = 1; old_i < old_cp->length(); old_i++) {
      // leave debugging crumb
      jbyte old_tag = old_cp->tag_at(old_i).value();
      switch (old_tag) {
      case JVM_CONSTANT_Class:
      case JVM_CONSTANT_UnresolvedClass:
        // revert the copy to JVM_CONSTANT_UnresolvedClass
        // May be resolving while calling this so do the same for
        // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
        (*merge_cp_p)->unresolved_klass_at_put(old_i,
          old_cp->klass_name_at(old_i));
        break;

      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // just copy the entry to *merge_cp_p, but double and long take
        // two constant pool entries
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
        old_i++;
        break;

      default:
        // just copy the entry to *merge_cp_p
        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
        break;
      }
    } // end for each old_cp entry

    // We don't need to sanity check that *merge_cp_length_p is within
    // *merge_cp_p bounds since we have the minimum on-entry check above.
    (*merge_cp_length_p) = old_i;
  }

  // merge_cp_len should be the same as old_cp->length() at this point
  // so this trace message is really a "warm-and-breathing" message.
  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
    ("after pass 0: merge_cp_len=%d", *merge_cp_length_p));

  int scratch_i;  // index into scratch_cp
  {
    // Pass 1a:
    // Compare scratch_cp entries to the old_cp entries that we have
    // already copied to *merge_cp_p. In this pass, we are eliminating
    // exact duplicates (matching entry at same index) so we only
    // compare entries in the common indice range.
    int increment = 1;
    int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
    for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
      // choose the step size before comparing: Double/Long occupy
      // two constant pool slots
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p,
        scratch_i, CHECK_0);
      if (match) {
        // found a match at the same index so nothing more to do
        continue;
      } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
                                              *merge_cp_p, scratch_i)) {
        // The mismatch in compare_entry_to() above is because of a
        // resolved versus unresolved class entry at the same index
        // with the same string value. Since Pass 0 reverted any
        // class entries to unresolved class entries in *merge_cp_p,
        // we go with the unresolved class entry.
        continue;
      }

      int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p,
        CHECK_0);
      if (found_i != 0) {
        guarantee(found_i != scratch_i,
          "compare_entry_to() and find_matching_entry() do not agree");

        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // The find_matching_entry() call above could fail to find a match
      // due to a resolved versus unresolved class or string entry situation
      // like we solved above with the is_unresolved_*_mismatch() calls.
      // However, we would have to call is_unresolved_*_mismatch() over
      // all of *merge_cp_p (potentially) and that doesn't seem to be
      // worth the time.

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }
  }

  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
    ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
    *merge_cp_length_p, scratch_i, _index_map_count));

  if (scratch_i < scratch_cp->length()) {
    // Pass 1b:
    // old_cp is smaller than scratch_cp so there are entries in
    // scratch_cp that we have not yet processed. We take care of
    // those now.
    int increment = 1;
    for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      // no same-index comparison here: these indices do not exist
      // in old_cp, so only a whole-pool search makes sense
      int found_i =
        scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_0);
      if (found_i != 0) {
        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }

    RC_TRACE_WITH_THREAD(0x00020000, THREAD,
      ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
      *merge_cp_length_p, scratch_i, _index_map_count));
  }

  return true;
} // end merge_constant_pools()
1245 // Scoped object to clean up the constant pool(s) created for merging
1246 class MergeCPCleaner {
1247 ClassLoaderData* _loader_data;
1248 ConstantPool* _cp;
1249 ConstantPool* _scratch_cp;
1250 public:
1251 MergeCPCleaner(ClassLoaderData* loader_data, ConstantPool* merge_cp) :
1252 _loader_data(loader_data), _cp(merge_cp), _scratch_cp(NULL) {}
1253 ~MergeCPCleaner() {
1254 _loader_data->add_to_deallocate_list(_cp);
1255 if (_scratch_cp != NULL) {
1256 _loader_data->add_to_deallocate_list(_scratch_cp);
1257 }
1258 }
1259 void add_scratch_cp(ConstantPool* scratch_cp) { _scratch_cp = scratch_cp; }
1260 };
// Merge constant pools between the_class and scratch_class and
// potentially rewrite bytecodes in scratch_class to use the merged
// constant pool.
//
// Returns JVMTI_ERROR_NONE on success; JVMTI_ERROR_INTERNAL if the
// class was redefined concurrently, the merge failed, or constant
// pool reference rewriting failed.
jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
             instanceKlassHandle the_class, instanceKlassHandle scratch_class,
             TRAPS) {
  // worst case merged constant pool length is old and new combined
  int merge_cp_length = the_class->constants()->length()
        + scratch_class->constants()->length();

  // Constant pools are not easily reused so we allocate a new one
  // each time.
  // merge_cp is created unsafe for concurrent GC processing. It
  // should be marked safe before discarding it. Even though
  // garbage, if it crosses a card boundary, it may be scanned
  // in order to find the start of the first complete object on the card.
  ClassLoaderData* loader_data = the_class->class_loader_data();
  ConstantPool* merge_cp_oop =
    ConstantPool::allocate(loader_data,
                           merge_cp_length,
                           THREAD);
  MergeCPCleaner cp_cleaner(loader_data, merge_cp_oop);

  HandleMark hm(THREAD);  // make sure handles are cleared before
                          // MergeCPCleaner clears out merge_cp_oop
  constantPoolHandle merge_cp(THREAD, merge_cp_oop);

  // Get constants() from the old class because it could have been rewritten
  // while we were at a safepoint allocating a new constant pool.
  constantPoolHandle old_cp(THREAD, the_class->constants());
  constantPoolHandle scratch_cp(THREAD, scratch_class->constants());

  // If the length changed, the class was redefined out from under us. Return
  // an error.
  if (merge_cp_length != the_class->constants()->length()
         + scratch_class->constants()->length()) {
    return JVMTI_ERROR_INTERNAL;
  }

  // Update the version number of the constant pool
  merge_cp->increment_and_save_version(old_cp->version());

  ResourceMark rm(THREAD);
  _index_map_count = 0;
  // index map entries default to -1 (unmapped); presumably
  // resource-allocated and released when rm goes out of scope
  _index_map_p = new intArray(scratch_cp->length(), -1);

  bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
                  &merge_cp_length, THREAD);
  if (!result) {
    // The merge can fail due to memory allocation failure or due
    // to robustness checks.
    return JVMTI_ERROR_INTERNAL;
  }

  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
    ("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count));

  if (_index_map_count == 0) {
    // there is nothing to map between the new and merged constant pools

    if (old_cp->length() == scratch_cp->length()) {
      // The old and new constant pools are the same length and the
      // index map is empty. This means that the three constant pools
      // are equivalent (but not the same). Unfortunately, the new
      // constant pool has not gone through link resolution nor have
      // the new class bytecodes gone through constant pool cache
      // rewriting so we can't use the old constant pool with the new
      // class.

      // toss the merged constant pool at return
    } else if (old_cp->length() < scratch_cp->length()) {
      // The old constant pool has fewer entries than the new constant
      // pool and the index map is empty. This means the new constant
      // pool is a superset of the old constant pool. However, the old
      // class bytecodes have already gone through constant pool cache
      // rewriting so we can't use the new constant pool with the old
      // class.

      // toss the merged constant pool at return
    } else {
      // The old constant pool has more entries than the new constant
      // pool and the index map is empty. This means that both the old
      // and merged constant pools are supersets of the new constant
      // pool.

      // Replace the new constant pool with a shrunken copy of the
      // merged constant pool
      set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length, THREAD);
      // The new constant pool replaces scratch_cp so have cleaner clean it up.
      // It can't be cleaned up while there are handles to it.
      cp_cleaner.add_scratch_cp(scratch_cp());
    }
  } else {
    if (RC_TRACE_ENABLED(0x00040000)) {
      // don't want to loop unless we are tracing
      int count = 0;
      for (int i = 1; i < _index_map_p->length(); i++) {
        int value = _index_map_p->at(i);

        if (value != -1) {
          RC_TRACE_WITH_THREAD(0x00040000, THREAD,
            ("index_map[%d]: old=%d new=%d", count, i, value));
          count++;
        }
      }
    }

    // We have entries mapped between the new and merged constant pools
    // so we have to rewrite some constant pool references.
    if (!rewrite_cp_refs(scratch_class, THREAD)) {
      return JVMTI_ERROR_INTERNAL;
    }

    // Replace the new constant pool with a shrunken copy of the
    // merged constant pool so now the rewritten bytecodes have
    // valid references; the previous new constant pool will get
    // GCed.
    set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length, THREAD);
    // The new constant pool replaces scratch_cp so have cleaner clean it up.
    // It can't be cleaned up while there are handles to it.
    cp_cleaner.add_scratch_cp(scratch_cp());
  }

  return JVMTI_ERROR_NONE;
} // end merge_cp_and_rewrite()
1389 // Rewrite constant pool references in klass scratch_class.
1390 bool VM_RedefineClasses::rewrite_cp_refs(instanceKlassHandle scratch_class,
1391 TRAPS) {
1393 // rewrite constant pool references in the methods:
1394 if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) {
1395 // propagate failure back to caller
1396 return false;
1397 }
1399 // rewrite constant pool references in the class_annotations:
1400 if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) {
1401 // propagate failure back to caller
1402 return false;
1403 }
1405 // rewrite constant pool references in the fields_annotations:
1406 if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) {
1407 // propagate failure back to caller
1408 return false;
1409 }
1411 // rewrite constant pool references in the methods_annotations:
1412 if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) {
1413 // propagate failure back to caller
1414 return false;
1415 }
1417 // rewrite constant pool references in the methods_parameter_annotations:
1418 if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class,
1419 THREAD)) {
1420 // propagate failure back to caller
1421 return false;
1422 }
1424 // rewrite constant pool references in the methods_default_annotations:
1425 if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class,
1426 THREAD)) {
1427 // propagate failure back to caller
1428 return false;
1429 }
1431 return true;
1432 } // end rewrite_cp_refs()
1435 // Rewrite constant pool references in the methods.
1436 bool VM_RedefineClasses::rewrite_cp_refs_in_methods(
1437 instanceKlassHandle scratch_class, TRAPS) {
1439 Array<Method*>* methods = scratch_class->methods();
1441 if (methods == NULL || methods->length() == 0) {
1442 // no methods so nothing to do
1443 return true;
1444 }
1446 // rewrite constant pool references in the methods:
1447 for (int i = methods->length() - 1; i >= 0; i--) {
1448 methodHandle method(THREAD, methods->at(i));
1449 methodHandle new_method;
1450 rewrite_cp_refs_in_method(method, &new_method, CHECK_false);
1451 if (!new_method.is_null()) {
1452 // the method has been replaced so save the new method version
1453 methods->at_put(i, new_method());
1454 }
1455 }
1457 return true;
1458 }
// Rewrite constant pool references in the specific method. This code
// was adapted from Rewriter::rewrite_method().
//
// Walks the method's bytecodes and replaces each mapped constant pool
// index (per find_new_index()) with its merged-pool index. If an ldc
// index grows past one byte, the instruction is rewritten to ldc_w via
// the Relocator and the new Method* is returned through new_method_p
// (which otherwise stays a null handle).
void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
       methodHandle *new_method_p, TRAPS) {

  *new_method_p = methodHandle();  // default is no new method

  // We cache a pointer to the bytecodes here in code_base. If GC
  // moves the Method*, then the bytecodes will also move which
  // will likely cause a crash. We create a No_Safepoint_Verifier
  // object to detect whether we pass a possible safepoint in this
  // code block.
  No_Safepoint_Verifier nsv;

  // Bytecodes and their length
  address code_base = method->code_base();
  int code_length = method->code_size();

  int bc_length;
  for (int bci = 0; bci < code_length; bci += bc_length) {
    address bcp = code_base + bci;
    Bytecodes::Code c = (Bytecodes::Code)(*bcp);

    bc_length = Bytecodes::length_for(c);
    if (bc_length == 0) {
      // More complicated bytecodes report a length of zero so
      // we have to try again a slightly different way.
      bc_length = Bytecodes::length_at(method(), bcp);
    }

    assert(bc_length != 0, "impossible bytecode length");

    switch (c) {
      // ldc carries a one-byte constant pool index operand
      case Bytecodes::_ldc:
      {
        int cp_index = *(bcp + 1);
        int new_index = find_new_index(cp_index);

        if (StressLdcRewrite && new_index == 0) {
          // If we are stressing ldc -> ldc_w rewriting, then we
          // always need a new_index value.
          new_index = cp_index;
        }
        if (new_index != 0) {
          // the original index is mapped so we have more work to do
          if (!StressLdcRewrite && new_index <= max_jubyte) {
            // The new value can still use ldc instead of ldc_w
            // unless we are trying to stress ldc -> ldc_w rewriting
            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
              ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
              bcp, cp_index, new_index));
            *(bcp + 1) = new_index;
          } else {
            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
              ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d",
              Bytecodes::name(c), bcp, cp_index, new_index));
            // the new value needs ldc_w instead of ldc
            u_char inst_buffer[4];  // max instruction size is 4 bytes
            bcp = (address)inst_buffer;
            // construct new instruction sequence
            *bcp = Bytecodes::_ldc_w;
            bcp++;
            // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
            // See comment below for difference between put_Java_u2()
            // and put_native_u2().
            Bytes::put_Java_u2(bcp, new_index);

            Relocator rc(method, NULL /* no RelocatorListener needed */);
            methodHandle m;
            {
              // the Relocator may allocate / reach a safepoint, so the
              // no-safepoint check is suspended for this scope only
              Pause_No_Safepoint_Verifier pnsv(&nsv);

              // ldc is 2 bytes and ldc_w is 3 bytes
              m = rc.insert_space_at(bci, 3, inst_buffer, THREAD);
              if (m.is_null() || HAS_PENDING_EXCEPTION) {
                guarantee(false, "insert_space_at() failed");
              }
            }

            // return the new method so that the caller can update
            // the containing class
            *new_method_p = method = m;
            // switch our bytecode processing loop from the old method
            // to the new method
            code_base = method->code_base();
            code_length = method->code_size();
            bcp = code_base + bci;
            c = (Bytecodes::Code)(*bcp);
            bc_length = Bytecodes::length_for(c);
            assert(bc_length != 0, "sanity check");
          } // end we need ldc_w instead of ldc
        } // end if there is a mapped index
      } break;

      // these bytecodes have a two-byte constant pool index
      case Bytecodes::_anewarray      : // fall through
      case Bytecodes::_checkcast      : // fall through
      case Bytecodes::_getfield       : // fall through
      case Bytecodes::_getstatic      : // fall through
      case Bytecodes::_instanceof    : // fall through
      case Bytecodes::_invokeinterface: // fall through
      case Bytecodes::_invokespecial  : // fall through
      case Bytecodes::_invokestatic   : // fall through
      case Bytecodes::_invokevirtual  : // fall through
      case Bytecodes::_ldc_w          : // fall through
      case Bytecodes::_ldc2_w         : // fall through
      case Bytecodes::_multianewarray : // fall through
      case Bytecodes::_new            : // fall through
      case Bytecodes::_putfield       : // fall through
      case Bytecodes::_putstatic      :
      {
        address p = bcp + 1;
        int cp_index = Bytes::get_Java_u2(p);
        int new_index = find_new_index(cp_index);
        if (new_index != 0) {
          // the original index is mapped so update w/ new value
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
            bcp, cp_index, new_index));
          // Rewriter::rewrite_method() uses put_native_u2() in this
          // situation because it is reusing the constant pool index
          // location for a native index into the constantPoolCache.
          // Since we are updating the constant pool index prior to
          // verification and constantPoolCache initialization, we
          // need to keep the new index in Java byte order.
          Bytes::put_Java_u2(p, new_index);
        }
      } break;
    }
  } // end for each bytecode
} // end rewrite_cp_refs_in_method()
1594 // Rewrite constant pool references in the class_annotations field.
1595 bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(
1596 instanceKlassHandle scratch_class, TRAPS) {
1598 AnnotationArray* class_annotations = scratch_class->class_annotations();
1599 if (class_annotations == NULL || class_annotations->length() == 0) {
1600 // no class_annotations so nothing to do
1601 return true;
1602 }
1604 RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1605 ("class_annotations length=%d", class_annotations->length()));
1607 int byte_i = 0; // byte index into class_annotations
1608 return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i,
1609 THREAD);
1610 }
1613 // Rewrite constant pool references in an annotations typeArray. This
1614 // "structure" is adapted from the RuntimeVisibleAnnotations_attribute
1615 // that is described in section 4.8.15 of the 2nd-edition of the VM spec:
1616 //
1617 // annotations_typeArray {
1618 // u2 num_annotations;
1619 // annotation annotations[num_annotations];
1620 // }
1621 //
1622 bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
1623 AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
1625 if ((byte_i_ref + 2) > annotations_typeArray->length()) {
1626 // not enough room for num_annotations field
1627 RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1628 ("length() is too small for num_annotations field"));
1629 return false;
1630 }
1632 u2 num_annotations = Bytes::get_Java_u2((address)
1633 annotations_typeArray->adr_at(byte_i_ref));
1634 byte_i_ref += 2;
1636 RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1637 ("num_annotations=%d", num_annotations));
1639 int calc_num_annotations = 0;
1640 for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
1641 if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
1642 byte_i_ref, THREAD)) {
1643 RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1644 ("bad annotation_struct at %d", calc_num_annotations));
1645 // propagate failure back to caller
1646 return false;
1647 }
1648 }
1649 assert(num_annotations == calc_num_annotations, "sanity check");
1651 return true;
1652 } // end rewrite_cp_refs_in_annotations_typeArray()
// Rewrite constant pool references in the annotation struct portion of
// an annotations_typeArray. This "structure" is from section 4.8.15 of
// the 2nd-edition of the VM spec:
//
// struct annotation {
//   u2 type_index;
//   u2 num_element_value_pairs;
//   {
//     u2 element_name_index;
//     element_value value;
//   } element_value_pairs[num_element_value_pairs];
// }
//
// byte_i_ref is advanced past the consumed bytes as a side effect;
// returns false (with byte_i_ref partially advanced) if the data is
// truncated or an element_value fails to rewrite.
bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
  if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
    // not enough room for smallest annotation_struct
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for annotation_struct"));
    return false;
  }

  // rewrite (if mapped) and consume the two-byte type_index
  u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
                    byte_i_ref, "mapped old type_index=%d", THREAD);

  // consume the two-byte num_element_value_pairs field
  u2 num_element_value_pairs = Bytes::get_Java_u2((address)
                                 annotations_typeArray->adr_at(byte_i_ref));
  byte_i_ref += 2;

  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
    ("type_index=%d num_element_value_pairs=%d", type_index,
    num_element_value_pairs));

  int calc_num_element_value_pairs = 0;
  for (; calc_num_element_value_pairs < num_element_value_pairs;
       calc_num_element_value_pairs++) {
    if ((byte_i_ref + 2) > annotations_typeArray->length()) {
      // not enough room for another element_name_index, let alone
      // the rest of another component
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("length() is too small for element_name_index"));
      return false;
    }

    // rewrite (if mapped) and consume the two-byte element_name_index
    u2 element_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old element_name_index=%d", THREAD);

    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("element_name_index=%d", element_name_index));

    if (!rewrite_cp_refs_in_element_value(annotations_typeArray,
           byte_i_ref, THREAD)) {
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("bad element_value at %d", calc_num_element_value_pairs));
      // propagate failure back to caller
      return false;
    }
  } // end for each component
  assert(num_element_value_pairs == calc_num_element_value_pairs,
    "sanity check");

  return true;
} // end rewrite_cp_refs_in_annotation_struct()
1721 // Rewrite a constant pool reference at the current position in
1722 // annotations_typeArray if needed. Returns the original constant
1723 // pool reference if a rewrite was not needed or the new constant
1724 // pool reference if a rewrite was needed.
1725 u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
1726 AnnotationArray* annotations_typeArray, int &byte_i_ref,
1727 const char * trace_mesg, TRAPS) {
1729 address cp_index_addr = (address)
1730 annotations_typeArray->adr_at(byte_i_ref);
1731 u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
1732 u2 new_cp_index = find_new_index(old_cp_index);
1733 if (new_cp_index != 0) {
1734 RC_TRACE_WITH_THREAD(0x02000000, THREAD, (trace_mesg, old_cp_index));
1735 Bytes::put_Java_u2(cp_index_addr, new_cp_index);
1736 old_cp_index = new_cp_index;
1737 }
1738 byte_i_ref += 2;
1739 return old_cp_index;
1740 }
// Rewrite constant pool references in the element_value portion of an
// annotations_typeArray. This "structure" is from section 4.8.15.1 of
// the 2nd-edition of the VM spec:
//
// struct element_value {
//   u1 tag;
//   union {
//     u2 const_value_index;
//     {
//       u2 type_name_index;
//       u2 const_name_index;
//     } enum_const_value;
//     u2 class_info_index;
//     annotation annotation_value;
//     struct {
//       u2 num_values;
//       element_value values[num_values];
//     } array_value;
//   } value;
// }
//
// The tag byte selects which union member follows. byte_i_ref is the
// byte cursor into annotations_typeArray and is advanced past the
// element_value that is processed. Returns false (without restoring
// byte_i_ref) on truncated data or an unknown tag.
bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {

  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
    // not enough room for a tag let alone the rest of an element_value
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for a tag"));
    return false;
  }

  u1 tag = annotations_typeArray->at(byte_i_ref);
  byte_i_ref++;
  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("tag='%c'", tag));

  switch (tag) {
    // These BaseType tag values are from Table 4.2 in VM spec:
    case 'B':  // byte
    case 'C':  // char
    case 'D':  // double
    case 'F':  // float
    case 'I':  // int
    case 'J':  // long
    case 'S':  // short
    case 'Z':  // boolean

    // The remaining tag values are from Table 4.8 in the 2nd-edition of
    // the VM spec:
    case 's':
    {
      // For the above tag values (including the BaseType values),
      // value.const_value_index is right union field.

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a const_value_index
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a const_value_index"));
        return false;
      }

      // rewrites the u2 in place if remapped; advances byte_i_ref by 2
      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
                               annotations_typeArray, byte_i_ref,
                               "mapped old const_value_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("const_value_index=%d", const_value_index));
    } break;

    case 'e':
    {
      // for the above tag value, value.enum_const_value is right union field

      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
        // not enough room for a enum_const_value
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a enum_const_value"));
        return false;
      }

      // two back-to-back u2 cp refs; each call advances byte_i_ref by 2
      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
                             annotations_typeArray, byte_i_ref,
                             "mapped old type_name_index=%d", THREAD);

      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old const_name_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("type_name_index=%d const_name_index=%d", type_name_index,
        const_name_index));
    } break;

    case 'c':
    {
      // for the above tag value, value.class_info_index is right union field

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a class_info_index
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a class_info_index"));
        return false;
      }

      u2 class_info_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old class_info_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("class_info_index=%d", class_info_index));
    } break;

    case '@':
      // For the above tag value, value.attr_value is the right union
      // field. This is a nested annotation.
      if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
             byte_i_ref, THREAD)) {
        // propagate failure back to caller
        return false;
      }
      break;

    case '[':
    {
      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a num_values field
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a num_values field"));
        return false;
      }

      // For the above tag value, value.array_value is the right union
      // field. This is an array of nested element_value.
      u2 num_values = Bytes::get_Java_u2((address)
                        annotations_typeArray->adr_at(byte_i_ref));
      byte_i_ref += 2;
      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("num_values=%d", num_values));

      // recurse for each nested element_value
      int calc_num_values = 0;
      for (; calc_num_values < num_values; calc_num_values++) {
        if (!rewrite_cp_refs_in_element_value(
               annotations_typeArray, byte_i_ref, THREAD)) {
          RC_TRACE_WITH_THREAD(0x02000000, THREAD,
            ("bad nested element_value at %d", calc_num_values));
          // propagate failure back to caller
          return false;
        }
      }
      assert(num_values == calc_num_values, "sanity check");
    } break;

    default:
      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("bad tag=0x%x", tag));
      return false;
  } // end decode tag field

  return true;
} // end rewrite_cp_refs_in_element_value()
1902 // Rewrite constant pool references in a fields_annotations field.
1903 bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
1904 instanceKlassHandle scratch_class, TRAPS) {
1906 Annotations* sca = scratch_class->annotations();
1907 if (sca == NULL) return true;
1909 Array<AnnotationArray*>* fields_annotations = sca->fields_annotations();
1911 if (fields_annotations == NULL || fields_annotations->length() == 0) {
1912 // no fields_annotations so nothing to do
1913 return true;
1914 }
1916 RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1917 ("fields_annotations length=%d", fields_annotations->length()));
1919 for (int i = 0; i < fields_annotations->length(); i++) {
1920 AnnotationArray* field_annotations = fields_annotations->at(i);
1921 if (field_annotations == NULL || field_annotations->length() == 0) {
1922 // this field does not have any annotations so skip it
1923 continue;
1924 }
1926 int byte_i = 0; // byte index into field_annotations
1927 if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i,
1928 THREAD)) {
1929 RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1930 ("bad field_annotations at %d", i));
1931 // propagate failure back to caller
1932 return false;
1933 }
1934 }
1936 return true;
1937 } // end rewrite_cp_refs_in_fields_annotations()
1940 // Rewrite constant pool references in a methods_annotations field.
1941 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
1942 instanceKlassHandle scratch_class, TRAPS) {
1944 Annotations* sca = scratch_class->annotations();
1945 if (sca == NULL) return true;
1947 Array<AnnotationArray*>* methods_annotations = sca->methods_annotations();
1949 if (methods_annotations == NULL || methods_annotations->length() == 0) {
1950 // no methods_annotations so nothing to do
1951 return true;
1952 }
1954 RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1955 ("methods_annotations length=%d", methods_annotations->length()));
1957 for (int i = 0; i < methods_annotations->length(); i++) {
1958 AnnotationArray* method_annotations = methods_annotations->at(i);
1959 if (method_annotations == NULL || method_annotations->length() == 0) {
1960 // this method does not have any annotations so skip it
1961 continue;
1962 }
1964 int byte_i = 0; // byte index into method_annotations
1965 if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i,
1966 THREAD)) {
1967 RC_TRACE_WITH_THREAD(0x02000000, THREAD,
1968 ("bad method_annotations at %d", i));
1969 // propagate failure back to caller
1970 return false;
1971 }
1972 }
1974 return true;
1975 } // end rewrite_cp_refs_in_methods_annotations()
// Rewrite constant pool references in a methods_parameter_annotations
// field. This "structure" is adapted from the
// RuntimeVisibleParameterAnnotations_attribute described in section
// 4.8.17 of the 2nd-edition of the VM spec:
//
// methods_parameter_annotations_typeArray {
//   u1 num_parameters;
//   {
//     u2 num_annotations;
//     annotation annotations[num_annotations];
//   } parameter_annotations[num_parameters];
// }
//
// Returns false if any method's parameter annotation data is malformed.
bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
       instanceKlassHandle scratch_class, TRAPS) {

  Annotations* sca = scratch_class->annotations();
  if (sca == NULL) return true;

  Array<AnnotationArray*>* methods_parameter_annotations =
    sca->methods_parameter_annotations();

  if (methods_parameter_annotations == NULL
      || methods_parameter_annotations->length() == 0) {
    // no methods_parameter_annotations so nothing to do
    return true;
  }

  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
    ("methods_parameter_annotations length=%d",
    methods_parameter_annotations->length()));

  for (int i = 0; i < methods_parameter_annotations->length(); i++) {
    AnnotationArray* method_parameter_annotations = methods_parameter_annotations->at(i);
    if (method_parameter_annotations == NULL
        || method_parameter_annotations->length() == 0) {
      // this method does not have any parameter annotations so skip it
      continue;
    }

    // NOTE(review): this check is unreachable — the length() == 0 case
    // already hit the continue above, so length() is >= 1 here. Kept as
    // harmless defensive code.
    if (method_parameter_annotations->length() < 1) {
      // not enough room for a num_parameters field
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("length() is too small for a num_parameters field at %d", i));
      return false;
    }

    int byte_i = 0;  // byte index into method_parameter_annotations

    u1 num_parameters = method_parameter_annotations->at(byte_i);
    byte_i++;

    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("num_parameters=%d", num_parameters));

    // each parameter carries its own annotations typeArray; each call
    // advances byte_i past the data it consumes
    int calc_num_parameters = 0;
    for (; calc_num_parameters < num_parameters; calc_num_parameters++) {
      if (!rewrite_cp_refs_in_annotations_typeArray(
             method_parameter_annotations, byte_i, THREAD)) {
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("bad method_parameter_annotations at %d", calc_num_parameters));
        // propagate failure back to caller
        return false;
      }
    }
    assert(num_parameters == calc_num_parameters, "sanity check");
  }

  return true;
} // end rewrite_cp_refs_in_methods_parameter_annotations()
2050 // Rewrite constant pool references in a methods_default_annotations
2051 // field. This "structure" is adapted from the AnnotationDefault_attribute
2052 // that is described in section 4.8.19 of the 2nd-edition of the VM spec:
2053 //
2054 // methods_default_annotations_typeArray {
2055 // element_value default_value;
2056 // }
2057 //
2058 bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
2059 instanceKlassHandle scratch_class, TRAPS) {
2061 Annotations* sca = scratch_class->annotations();
2062 if (sca == NULL) return true;
2064 Array<AnnotationArray*>* methods_default_annotations =
2065 sca->methods_default_annotations();
2067 if (methods_default_annotations == NULL
2068 || methods_default_annotations->length() == 0) {
2069 // no methods_default_annotations so nothing to do
2070 return true;
2071 }
2073 RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2074 ("methods_default_annotations length=%d",
2075 methods_default_annotations->length()));
2077 for (int i = 0; i < methods_default_annotations->length(); i++) {
2078 AnnotationArray* method_default_annotations = methods_default_annotations->at(i);
2079 if (method_default_annotations == NULL
2080 || method_default_annotations->length() == 0) {
2081 // this method does not have any default annotations so skip it
2082 continue;
2083 }
2085 int byte_i = 0; // byte index into method_default_annotations
2087 if (!rewrite_cp_refs_in_element_value(
2088 method_default_annotations, byte_i, THREAD)) {
2089 RC_TRACE_WITH_THREAD(0x02000000, THREAD,
2090 ("bad default element_value at %d", i));
2091 // propagate failure back to caller
2092 return false;
2093 }
2094 }
2096 return true;
2097 } // end rewrite_cp_refs_in_methods_default_annotations()
2100 // Rewrite constant pool references in the method's stackmap table.
2101 // These "structures" are adapted from the StackMapTable_attribute that
2102 // is described in section 4.8.4 of the 6.0 version of the VM spec
2103 // (dated 2005.10.26):
2104 // file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
2105 //
2106 // stack_map {
2107 // u2 number_of_entries;
2108 // stack_map_frame entries[number_of_entries];
2109 // }
2110 //
2111 void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table(
2112 methodHandle method, TRAPS) {
2114 if (!method->has_stackmap_table()) {
2115 return;
2116 }
2118 AnnotationArray* stackmap_data = method->stackmap_data();
2119 address stackmap_p = (address)stackmap_data->adr_at(0);
2120 address stackmap_end = stackmap_p + stackmap_data->length();
2122 assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries");
2123 u2 number_of_entries = Bytes::get_Java_u2(stackmap_p);
2124 stackmap_p += 2;
2126 RC_TRACE_WITH_THREAD(0x04000000, THREAD,
2127 ("number_of_entries=%u", number_of_entries));
2129 // walk through each stack_map_frame
2130 u2 calc_number_of_entries = 0;
2131 for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
2132 // The stack_map_frame structure is a u1 frame_type followed by
2133 // 0 or more bytes of data:
2134 //
2135 // union stack_map_frame {
2136 // same_frame;
2137 // same_locals_1_stack_item_frame;
2138 // same_locals_1_stack_item_frame_extended;
2139 // chop_frame;
2140 // same_frame_extended;
2141 // append_frame;
2142 // full_frame;
2143 // }
2145 assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
2146 // The Linux compiler does not like frame_type to be u1 or u2. It
2147 // issues the following warning for the first if-statement below:
2148 //
2149 // "warning: comparison is always true due to limited range of data type"
2150 //
2151 u4 frame_type = *stackmap_p;
2152 stackmap_p++;
2154 // same_frame {
2155 // u1 frame_type = SAME; /* 0-63 */
2156 // }
2157 if (frame_type >= 0 && frame_type <= 63) {
2158 // nothing more to do for same_frame
2159 }
2161 // same_locals_1_stack_item_frame {
2162 // u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
2163 // verification_type_info stack[1];
2164 // }
2165 else if (frame_type >= 64 && frame_type <= 127) {
2166 rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
2167 calc_number_of_entries, frame_type, THREAD);
2168 }
2170 // reserved for future use
2171 else if (frame_type >= 128 && frame_type <= 246) {
2172 // nothing more to do for reserved frame_types
2173 }
2175 // same_locals_1_stack_item_frame_extended {
2176 // u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
2177 // u2 offset_delta;
2178 // verification_type_info stack[1];
2179 // }
2180 else if (frame_type == 247) {
2181 stackmap_p += 2;
2182 rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
2183 calc_number_of_entries, frame_type, THREAD);
2184 }
2186 // chop_frame {
2187 // u1 frame_type = CHOP; /* 248-250 */
2188 // u2 offset_delta;
2189 // }
2190 else if (frame_type >= 248 && frame_type <= 250) {
2191 stackmap_p += 2;
2192 }
2194 // same_frame_extended {
2195 // u1 frame_type = SAME_FRAME_EXTENDED; /* 251*/
2196 // u2 offset_delta;
2197 // }
2198 else if (frame_type == 251) {
2199 stackmap_p += 2;
2200 }
2202 // append_frame {
2203 // u1 frame_type = APPEND; /* 252-254 */
2204 // u2 offset_delta;
2205 // verification_type_info locals[frame_type - 251];
2206 // }
2207 else if (frame_type >= 252 && frame_type <= 254) {
2208 assert(stackmap_p + 2 <= stackmap_end,
2209 "no room for offset_delta");
2210 stackmap_p += 2;
2211 u1 len = frame_type - 251;
2212 for (u1 i = 0; i < len; i++) {
2213 rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
2214 calc_number_of_entries, frame_type, THREAD);
2215 }
2216 }
2218 // full_frame {
2219 // u1 frame_type = FULL_FRAME; /* 255 */
2220 // u2 offset_delta;
2221 // u2 number_of_locals;
2222 // verification_type_info locals[number_of_locals];
2223 // u2 number_of_stack_items;
2224 // verification_type_info stack[number_of_stack_items];
2225 // }
2226 else if (frame_type == 255) {
2227 assert(stackmap_p + 2 + 2 <= stackmap_end,
2228 "no room for smallest full_frame");
2229 stackmap_p += 2;
2231 u2 number_of_locals = Bytes::get_Java_u2(stackmap_p);
2232 stackmap_p += 2;
2234 for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) {
2235 rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
2236 calc_number_of_entries, frame_type, THREAD);
2237 }
2239 // Use the largest size for the number_of_stack_items, but only get
2240 // the right number of bytes.
2241 u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p);
2242 stackmap_p += 2;
2244 for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) {
2245 rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
2246 calc_number_of_entries, frame_type, THREAD);
2247 }
2248 }
2249 } // end while there is a stack_map_frame
2250 assert(number_of_entries == calc_number_of_entries, "sanity check");
2251 } // end rewrite_cp_refs_in_stack_map_table()
// Rewrite constant pool references in the verification type info
// portion of the method's stackmap table. These "structures" are
// adapted from the StackMapTable_attribute that is described in
// section 4.8.4 of the 6.0 version of the VM spec (dated 2005.10.26):
// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
//
// The verification_type_info structure is a u1 tag followed by 0 or
// more bytes of data:
//
// union verification_type_info {
//   Top_variable_info;
//   Integer_variable_info;
//   Float_variable_info;
//   Long_variable_info;
//   Double_variable_info;
//   Null_variable_info;
//   UninitializedThis_variable_info;
//   Object_variable_info;
//   Uninitialized_variable_info;
// }
//
// stackmap_p_ref is advanced past the entry that is processed. Only
// Object_variable_info carries a cp index; it is rewritten in place
// when find_new_index() reports a remapping. frame_i and frame_type
// are passed in for tracing only.
void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info(
       address& stackmap_p_ref, address stackmap_end, u2 frame_i,
       u1 frame_type, TRAPS) {

  assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag");
  u1 tag = *stackmap_p_ref;
  stackmap_p_ref++;

  switch (tag) {
  // Top_variable_info {
  //   u1 tag = ITEM_Top; /* 0 */
  // }
  // verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top
  case 0:  // fall through

  // Integer_variable_info {
  //   u1 tag = ITEM_Integer; /* 1 */
  // }
  case ITEM_Integer:  // fall through

  // Float_variable_info {
  //   u1 tag = ITEM_Float; /* 2 */
  // }
  case ITEM_Float:  // fall through

  // Double_variable_info {
  //   u1 tag = ITEM_Double; /* 3 */
  // }
  case ITEM_Double:  // fall through

  // Long_variable_info {
  //   u1 tag = ITEM_Long; /* 4 */
  // }
  case ITEM_Long:  // fall through

  // Null_variable_info {
  //   u1 tag = ITEM_Null; /* 5 */
  // }
  case ITEM_Null:  // fall through

  // UninitializedThis_variable_info {
  //   u1 tag = ITEM_UninitializedThis; /* 6 */
  // }
  case ITEM_UninitializedThis:
    // nothing more to do for the above tag types
    break;

  // Object_variable_info {
  //   u1 tag = ITEM_Object; /* 7 */
  //   u2 cpool_index;
  // }
  case ITEM_Object:
  {
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index");
    u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref);
    u2 new_cp_index = find_new_index(cpool_index);
    if (new_cp_index != 0) {
      RC_TRACE_WITH_THREAD(0x04000000, THREAD,
        ("mapped old cpool_index=%d", cpool_index));
      // rewrite the u2 in place with the remapped index
      Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
      cpool_index = new_cp_index;
    }
    stackmap_p_ref += 2;

    RC_TRACE_WITH_THREAD(0x04000000, THREAD,
      ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i,
      frame_type, cpool_index));
  } break;

  // Uninitialized_variable_info {
  //   u1 tag = ITEM_Uninitialized; /* 8 */
  //   u2 offset;
  // }
  case ITEM_Uninitialized:
    // offset is a bytecode offset, not a cp index: just skip it
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
    stackmap_p_ref += 2;
    break;

  default:
    RC_TRACE_WITH_THREAD(0x04000000, THREAD,
      ("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag));
    ShouldNotReachHere();
    break;
  } // end switch (tag)
} // end rewrite_cp_refs_in_verification_type_info()
// Change the constant pool associated with klass scratch_class to
// scratch_cp. If shrink is true, then scratch_cp_length elements
// are copied from scratch_cp to a smaller constant pool and the
// smaller constant pool is associated with scratch_class.
//
// After attaching the (shrunk) pool, every cp index stored outside the
// pool itself — field metadata, inner-class info, and per-method data
// (name/signature, checked exceptions, exception table, local variable
// table, stackmap table) — is remapped via find_new_index(), which
// returns 0 for indices that did not move.
void VM_RedefineClasses::set_new_constant_pool(
       ClassLoaderData* loader_data,
       instanceKlassHandle scratch_class, constantPoolHandle scratch_cp,
       int scratch_cp_length, TRAPS) {
  assert(scratch_cp->length() >= scratch_cp_length, "sanity check");

  // scratch_cp is a merged constant pool and has enough space for a
  // worst case merge situation. We want to associate the minimum
  // sized constant pool with the klass to save space.
  constantPoolHandle smaller_cp(THREAD,
    ConstantPool::allocate(loader_data, scratch_cp_length, THREAD));

  // preserve version() value in the smaller copy
  int version = scratch_cp->version();
  assert(version != 0, "sanity check");
  smaller_cp->set_version(version);

  // copy starts at index 1 because cp slot 0 is unused
  scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD);
  scratch_cp = smaller_cp;

  // attach new constant pool to klass
  scratch_cp->set_pool_holder(scratch_class());

  // attach klass to new constant pool
  scratch_class->set_constants(scratch_cp());

  int i;  // for portability

  // update each field in klass to use new constant pool indices as needed
  for (JavaFieldStream fs(scratch_class); !fs.done(); fs.next()) {
    jshort cur_index = fs.name_index();
    jshort new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-name_index change: %d to %d", cur_index, new_index));
      fs.set_name_index(new_index);
    }
    cur_index = fs.signature_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-signature_index change: %d to %d", cur_index, new_index));
      fs.set_signature_index(new_index);
    }
    cur_index = fs.initval_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-initval_index change: %d to %d", cur_index, new_index));
      fs.set_initval_index(new_index);
    }
    cur_index = fs.generic_signature_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-generic_signature change: %d to %d", cur_index, new_index));
      fs.set_generic_signature_index(new_index);
    }
  } // end for each field

  // Update constant pool indices in the inner classes info to use
  // new constant indices as needed. The inner classes info is a
  // quadruple:
  // (inner_class_info, outer_class_info, inner_name, inner_access_flags)
  InnerClassesIterator iter(scratch_class);
  for (; !iter.done(); iter.next()) {
    int cur_index = iter.inner_class_info_index();
    if (cur_index == 0) {
      continue;  // JVM spec. allows null inner class refs so skip it
    }
    int new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("inner_class_info change: %d to %d", cur_index, new_index));
      iter.set_inner_class_info_index(new_index);
    }
    cur_index = iter.outer_class_info_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("outer_class_info change: %d to %d", cur_index, new_index));
      iter.set_outer_class_info_index(new_index);
    }
    cur_index = iter.inner_name_index();
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("inner_name change: %d to %d", cur_index, new_index));
      iter.set_inner_name_index(new_index);
    }
  } // end for each inner class

  // Attach each method in klass to the new constant pool and update
  // to use new constant pool indices as needed:
  Array<Method*>* methods = scratch_class->methods();
  for (i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, methods->at(i));
    method->set_constants(scratch_cp());

    int new_index = find_new_index(method->name_index());
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("method-name_index change: %d to %d", method->name_index(),
        new_index));
      method->set_name_index(new_index);
    }
    new_index = find_new_index(method->signature_index());
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("method-signature_index change: %d to %d",
        method->signature_index(), new_index));
      method->set_signature_index(new_index);
    }
    new_index = find_new_index(method->generic_signature_index());
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("method-generic_signature_index change: %d to %d",
        method->generic_signature_index(), new_index));
      method->set_generic_signature_index(new_index);
    }

    // Update constant pool indices in the method's checked exception
    // table to use new constant indices as needed.
    int cext_length = method->checked_exceptions_length();
    if (cext_length > 0) {
      CheckedExceptionElement * cext_table =
        method->checked_exceptions_start();
      for (int j = 0; j < cext_length; j++) {
        int cur_index = cext_table[j].class_cp_index;
        int new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("cext-class_cp_index change: %d to %d", cur_index, new_index));
          cext_table[j].class_cp_index = (u2)new_index;
        }
      } // end for each checked exception table entry
    } // end if there are checked exception table entries

    // Update each catch type index in the method's exception table
    // to use new constant pool indices as needed. The exception table
    // holds quadruple entries of the form:
    //   (beg_bci, end_bci, handler_bci, klass_index)

    ExceptionTable ex_table(method());
    int ext_length = ex_table.length();

    for (int j = 0; j < ext_length; j ++) {
      int cur_index = ex_table.catch_type_index(j);
      int new_index = find_new_index(cur_index);
      if (new_index != 0) {
        RC_TRACE_WITH_THREAD(0x00080000, THREAD,
          ("ext-klass_index change: %d to %d", cur_index, new_index));
        ex_table.set_catch_type_index(j, new_index);
      }
    } // end for each exception table entry

    // Update constant pool indices in the method's local variable
    // table to use new constant indices as needed. The local variable
    // table hold sextuple entries of the form:
    // (start_pc, length, name_index, descriptor_index, signature_index, slot)
    int lvt_length = method->localvariable_table_length();
    if (lvt_length > 0) {
      LocalVariableTableElement * lv_table =
        method->localvariable_table_start();
      for (int j = 0; j < lvt_length; j++) {
        int cur_index = lv_table[j].name_cp_index;
        int new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("lvt-name_cp_index change: %d to %d", cur_index, new_index));
          lv_table[j].name_cp_index = (u2)new_index;
        }
        cur_index = lv_table[j].descriptor_cp_index;
        new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("lvt-descriptor_cp_index change: %d to %d", cur_index,
            new_index));
          lv_table[j].descriptor_cp_index = (u2)new_index;
        }
        cur_index = lv_table[j].signature_cp_index;
        new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("lvt-signature_cp_index change: %d to %d", cur_index, new_index));
          lv_table[j].signature_cp_index = (u2)new_index;
        }
      } // end for each local variable table entry
    } // end if there are local variable table entries

    // finally, remap Object_variable_info cp indices in the stackmap table
    rewrite_cp_refs_in_stack_map_table(method, THREAD);
  } // end for each method
} // end set_new_constant_pool()
2561 void VM_RedefineClasses::adjust_array_vtable(Klass* k_oop) {
2562 ArrayKlass* ak = ArrayKlass::cast(k_oop);
2563 bool trace_name_printed = false;
2564 ak->vtable()->adjust_method_entries(_matching_old_methods,
2565 _matching_new_methods,
2566 _matching_methods_length,
2567 &trace_name_printed);
2568 }
2570 // Unevolving classes may point to methods of the_class directly
2571 // from their constant pool caches, itables, and/or vtables. We
2572 // use the SystemDictionary::classes_do() facility and this helper
2573 // to fix up these pointers.
2574 //
2575 // Note: We currently don't support updating the vtable in
2576 // arrayKlassOops. See Open Issues in jvmtiRedefineClasses.hpp.
void VM_RedefineClasses::adjust_cpool_cache_and_vtable(Klass* k_oop,
       ClassLoaderData* initiating_loader,  // unused here; signature is dictated by classes_do()
       TRAPS) {
  Klass *k = k_oop;
  if (k->oop_is_instance()) {
    HandleMark hm(THREAD);
    InstanceKlass *ik = (InstanceKlass *) k;

    // HotSpot specific optimization! HotSpot does not currently
    // support delegation from the bootstrap class loader to a
    // user-defined class loader. This means that if the bootstrap
    // class loader is the initiating class loader, then it will also
    // be the defining class loader. This also means that classes
    // loaded by the bootstrap class loader cannot refer to classes
    // loaded by a user-defined class loader. Note: a user-defined
    // class loader can delegate to the bootstrap class loader.
    //
    // If the current class being redefined has a user-defined class
    // loader as its defining class loader, then we can skip all
    // classes loaded by the bootstrap class loader.
    bool is_user_defined =
           InstanceKlass::cast(_the_class_oop)->class_loader() != NULL;
    if (is_user_defined && ik->class_loader() == NULL) {
      return;
    }

    // If the class being redefined is java.lang.Object, we need to fix all
    // array class vtables also (every array class is a subtype of Object).
    if (_the_class_oop == SystemDictionary::Object_klass()) {
      ik->array_klasses_do(adjust_array_vtable);
    }

    // This is a very busy routine. We don't want too much tracing
    // printed out.
    bool trace_name_printed = false;

    // Very noisy: only enable this call if you are trying to determine
    // that a specific class gets found by this routine.
    // RC_TRACE macro has an embedded ResourceMark
    // RC_TRACE_WITH_THREAD(0x00100000, THREAD,
    //   ("adjust check: name=%s", ik->external_name()));
    // trace_name_printed = true;

    // Fix the vtable embedded in the_class and subclasses of the_class,
    // if one exists. We discard scratch_class and we don't keep an
    // InstanceKlass around to hold obsolete methods so we don't have
    // any other InstanceKlass embedded vtables to update. The vtable
    // holds the Method*s for virtual (but not final) methods.
    if (ik->vtable_length() > 0 && ik->is_subtype_of(_the_class_oop)) {
      // ik->vtable() creates a wrapper object; rm cleans it up
      ResourceMark rm(THREAD);
      ik->vtable()->adjust_method_entries(_matching_old_methods,
                                          _matching_new_methods,
                                          _matching_methods_length,
                                          &trace_name_printed);
    }

    // If the current class has an itable and we are either redefining an
    // interface or if the current class is a subclass of the_class, then
    // we potentially have to fix the itable. If we are redefining an
    // interface, then we have to call adjust_method_entries() for
    // every InstanceKlass that has an itable since there isn't a
    // subclass relationship between an interface and an InstanceKlass.
    if (ik->itable_length() > 0 && (_the_class_oop->is_interface()
        || ik->is_subclass_of(_the_class_oop))) {
      // ik->itable() creates a wrapper object; rm cleans it up
      ResourceMark rm(THREAD);
      ik->itable()->adjust_method_entries(_matching_old_methods,
                                          _matching_new_methods,
                                          _matching_methods_length,
                                          &trace_name_printed);
    }

    // The constant pools in other classes (other_cp) can refer to
    // methods in the_class. We have to update method information in
    // other_cp's cache. If other_cp has a previous version, then we
    // have to repeat the process for each previous version. The
    // constant pool cache holds the Method*s for non-virtual
    // methods and for virtual, final methods.
    //
    // Special case: if the current class is the_class, then new_cp
    // has already been attached to the_class and old_cp has already
    // been added as a previous version. The new_cp doesn't have any
    // cached references to old methods so it doesn't need to be
    // updated. We can simply start with the previous version(s) in
    // that case.
    constantPoolHandle other_cp;
    ConstantPoolCache* cp_cache;

    if (k_oop != _the_class_oop) {
      // this klass' constant pool cache may need adjustment
      other_cp = constantPoolHandle(ik->constants());
      cp_cache = other_cp->cache();
      if (cp_cache != NULL) {
        cp_cache->adjust_method_entries(_matching_old_methods,
                                        _matching_new_methods,
                                        _matching_methods_length,
                                        &trace_name_printed);
      }
    }
    {
      ResourceMark rm(THREAD);
      // PreviousVersionInfo objects returned via PreviousVersionWalker
      // contain a GrowableArray of handles. We have to clean up the
      // GrowableArray _after_ the PreviousVersionWalker destructor
      // has destroyed the handles.
      {
        // the previous versions' constant pool caches may need adjustment
        PreviousVersionWalker pvw(ik);
        for (PreviousVersionInfo * pv_info = pvw.next_previous_version();
             pv_info != NULL; pv_info = pvw.next_previous_version()) {
          other_cp = pv_info->prev_constant_pool_handle();
          cp_cache = other_cp->cache();
          if (cp_cache != NULL) {
            cp_cache->adjust_method_entries(_matching_old_methods,
                                            _matching_new_methods,
                                            _matching_methods_length,
                                            &trace_name_printed);
          }
        }
      } // pvw is cleaned up
    } // rm is cleaned up
  }
}
2702 void VM_RedefineClasses::update_jmethod_ids() {
2703 for (int j = 0; j < _matching_methods_length; ++j) {
2704 Method* old_method = _matching_old_methods[j];
2705 jmethodID jmid = old_method->find_jmethod_id_or_null();
2706 if (jmid != NULL) {
2707 // There is a jmethodID, change it to point to the new method
2708 methodHandle new_method_h(_matching_new_methods[j]);
2709 Method::change_method_associated_with_jmethod_id(jmid, new_method_h());
2710 assert(Method::resolve_jmethod_id(jmid) == _matching_new_methods[j],
2711 "should be replaced");
2712 }
2713 }
2714 }
2716 void VM_RedefineClasses::check_methods_and_mark_as_obsolete(
2717 BitMap *emcp_methods, int * emcp_method_count_p) {
2718 *emcp_method_count_p = 0;
2719 int obsolete_count = 0;
2720 int old_index = 0;
2721 for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
2722 Method* old_method = _matching_old_methods[j];
2723 Method* new_method = _matching_new_methods[j];
2724 Method* old_array_method;
2726 // Maintain an old_index into the _old_methods array by skipping
2727 // deleted methods
2728 while ((old_array_method = _old_methods->at(old_index)) != old_method) {
2729 ++old_index;
2730 }
2732 if (MethodComparator::methods_EMCP(old_method, new_method)) {
2733 // The EMCP definition from JSR-163 requires the bytecodes to be
2734 // the same with the exception of constant pool indices which may
2735 // differ. However, the constants referred to by those indices
2736 // must be the same.
2737 //
2738 // We use methods_EMCP() for comparison since constant pool
2739 // merging can remove duplicate constant pool entries that were
2740 // present in the old method and removed from the rewritten new
2741 // method. A faster binary comparison function would consider the
2742 // old and new methods to be different when they are actually
2743 // EMCP.
2744 //
2745 // The old and new methods are EMCP and you would think that we
2746 // could get rid of one of them here and now and save some space.
2747 // However, the concept of EMCP only considers the bytecodes and
2748 // the constant pool entries in the comparison. Other things,
2749 // e.g., the line number table (LNT) or the local variable table
2750 // (LVT) don't count in the comparison. So the new (and EMCP)
2751 // method can have a new LNT that we need so we can't just
2752 // overwrite the new method with the old method.
2753 //
2754 // When this routine is called, we have already attached the new
2755 // methods to the_class so the old methods are effectively
2756 // overwritten. However, if an old method is still executing,
2757 // then the old method cannot be collected until sometime after
2758 // the old method call has returned. So the overwriting of old
2759 // methods by new methods will save us space except for those
2760 // (hopefully few) old methods that are still executing.
2761 //
2762 // A method refers to a ConstMethod* and this presents another
2763 // possible avenue to space savings. The ConstMethod* in the
2764 // new method contains possibly new attributes (LNT, LVT, etc).
2765 // At first glance, it seems possible to save space by replacing
2766 // the ConstMethod* in the old method with the ConstMethod*
2767 // from the new method. The old and new methods would share the
2768 // same ConstMethod* and we would save the space occupied by
2769 // the old ConstMethod*. However, the ConstMethod* contains
2770 // a back reference to the containing method. Sharing the
2771 // ConstMethod* between two methods could lead to confusion in
2772 // the code that uses the back reference. This would lead to
2773 // brittle code that could be broken in non-obvious ways now or
2774 // in the future.
2775 //
2776 // Another possibility is to copy the ConstMethod* from the new
2777 // method to the old method and then overwrite the new method with
2778 // the old method. Since the ConstMethod* contains the bytecodes
2779 // for the method embedded in the oop, this option would change
2780 // the bytecodes out from under any threads executing the old
2781 // method and make the thread's bcp invalid. Since EMCP requires
2782 // that the bytecodes be the same modulo constant pool indices, it
2783 // is straight forward to compute the correct new bcp in the new
2784 // ConstMethod* from the old bcp in the old ConstMethod*. The
2785 // time consuming part would be searching all the frames in all
2786 // of the threads to find all of the calls to the old method.
2787 //
2788 // It looks like we will have to live with the limited savings
2789 // that we get from effectively overwriting the old methods
2790 // when the new methods are attached to the_class.
2792 // track which methods are EMCP for add_previous_version() call
2793 emcp_methods->set_bit(old_index);
2794 (*emcp_method_count_p)++;
2796 // An EMCP method is _not_ obsolete. An obsolete method has a
2797 // different jmethodID than the current method. An EMCP method
2798 // has the same jmethodID as the current method. Having the
2799 // same jmethodID for all EMCP versions of a method allows for
2800 // a consistent view of the EMCP methods regardless of which
2801 // EMCP method you happen to have in hand. For example, a
2802 // breakpoint set in one EMCP method will work for all EMCP
2803 // versions of the method including the current one.
2804 } else {
2805 // mark obsolete methods as such
2806 old_method->set_is_obsolete();
2807 obsolete_count++;
2809 // obsolete methods need a unique idnum
2810 u2 num = InstanceKlass::cast(_the_class_oop)->next_method_idnum();
2811 if (num != ConstMethod::UNSET_IDNUM) {
2812 // u2 old_num = old_method->method_idnum();
2813 old_method->set_method_idnum(num);
2814 // TO DO: attach obsolete annotations to obsolete method's new idnum
2815 }
2816 // With tracing we try not to "yack" too much. The position of
2817 // this trace assumes there are fewer obsolete methods than
2818 // EMCP methods.
2819 RC_TRACE(0x00000100, ("mark %s(%s) as obsolete",
2820 old_method->name()->as_C_string(),
2821 old_method->signature()->as_C_string()));
2822 }
2823 old_method->set_is_old();
2824 }
2825 for (int i = 0; i < _deleted_methods_length; ++i) {
2826 Method* old_method = _deleted_methods[i];
2828 assert(old_method->vtable_index() < 0,
2829 "cannot delete methods with vtable entries");;
2831 // Mark all deleted methods as old and obsolete
2832 old_method->set_is_old();
2833 old_method->set_is_obsolete();
2834 ++obsolete_count;
2835 // With tracing we try not to "yack" too much. The position of
2836 // this trace assumes there are fewer obsolete methods than
2837 // EMCP methods.
2838 RC_TRACE(0x00000100, ("mark deleted %s(%s) as obsolete",
2839 old_method->name()->as_C_string(),
2840 old_method->signature()->as_C_string()));
2841 }
2842 assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(),
2843 "sanity check");
2844 RC_TRACE(0x00000100, ("EMCP_cnt=%d, obsolete_cnt=%d", *emcp_method_count_p,
2845 obsolete_count));
2846 }
2848 // This internal class transfers the native function registration from old methods
2849 // to new methods. It is designed to handle both the simple case of unchanged
2850 // native methods and the complex cases of native method prefixes being added and/or
2851 // removed.
2852 // It expects only to be used during the VM_RedefineClasses op (a safepoint).
2853 //
2854 // This class is used after the new methods have been installed in "the_class".
2855 //
2856 // So, for example, the following must be handled. Where 'm' is a method and
2857 // a number followed by an underscore is a prefix.
2858 //
2859 // Old Name New Name
2860 // Simple transfer to new method m -> m
2861 // Add prefix m -> 1_m
2862 // Remove prefix 1_m -> m
2863 // Simultaneous add of prefixes m -> 3_2_1_m
2864 // Simultaneous removal of prefixes 3_2_1_m -> m
2865 // Simultaneous add and remove 1_m -> 2_m
2866 // Same, caused by prefix removal only 3_2_1_m -> 3_2_m
2867 //
class TransferNativeFunctionRegistration {
 private:
  instanceKlassHandle the_class;  // class whose new methods receive the registrations
  int prefix_count;               // number of active native-method prefixes
  char** prefixes;                // prefixes from JvmtiExport, ordered by agent

  // Recursively search the binary tree of possibly prefixed method names.
  // Iteration could be used if all agents were well behaved. Full tree walk is
  // more resilent to agents not cleaning up intermediate methods.
  // Branch at each depth in the binary tree is:
  // (1) without the prefix.
  // (2) with the prefix.
  // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
  // Returns the first native method found, or NULL if this branch has none.
  Method* search_prefix_name_space(int depth, char* name_str, size_t name_len,
                                   Symbol* signature) {
    // probe() only finds a symbol that already exists; a name that was never
    // interned cannot belong to any method of the_class.
    TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len);
    if (name_symbol != NULL) {
      Method* method = the_class()->lookup_method(name_symbol, signature);
      if (method != NULL) {
        // Even if prefixed, intermediate methods must exist.
        if (method->is_native()) {
          // Wahoo, we found a (possibly prefixed) version of the method, return it.
          return method;
        }
        if (depth < prefix_count) {
          // Try applying further prefixes (other than this one).
          method = search_prefix_name_space(depth+1, name_str, name_len, signature);
          if (method != NULL) {
            return method; // found
          }

          // Try adding this prefix to the method name and see if it matches
          // another method name.
          char* prefix = prefixes[depth];
          size_t prefix_len = strlen(prefix);
          size_t trial_len = name_len + prefix_len;
          // trial name lives in the current resource area (freed by caller's
          // ResourceMark in strip_and_search_for_new_native)
          char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
          strcpy(trial_name_str, prefix);
          strcat(trial_name_str, name_str);
          method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
                                            signature);
          if (method != NULL) {
            // If found along this branch, it was prefixed, mark as such
            method->set_is_prefixed_native();
            return method; // found
          }
        }
      }
    }
    return NULL;  // This whole branch bore nothing
  }

  // Return the method name with old prefixes stripped away.
  char* method_name_without_prefixes(Method* method) {
    Symbol* name = method->name();
    char* name_str = name->as_utf8();

    // Old prefixing may be defunct, strip prefixes, if any.
    // Walk the prefix list back-to-front since prefixes nest outermost-first.
    for (int i = prefix_count-1; i >= 0; i--) {
      char* prefix = prefixes[i];
      size_t prefix_len = strlen(prefix);
      if (strncmp(prefix, name_str, prefix_len) == 0) {
        name_str += prefix_len;  // advance past the matched prefix
      }
    }
    return name_str;
  }

  // Strip any prefixes off the old native method, then try to find a
  // (possibly prefixed) new native that matches it.
  Method* strip_and_search_for_new_native(Method* method) {
    ResourceMark rm;  // reclaims trial names built during the recursive search
    char* name_str = method_name_without_prefixes(method);
    return search_prefix_name_space(0, name_str, strlen(name_str),
                                    method->signature());
  }

 public:

  // Construct a native method transfer processor for this class.
  TransferNativeFunctionRegistration(instanceKlassHandle _the_class) {
    assert(SafepointSynchronize::is_at_safepoint(), "sanity check");

    the_class = _the_class;
    prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
  }

  // Attempt to transfer any of the old or deleted methods that are native
  void transfer_registrations(Method** old_methods, int methods_length) {
    for (int j = 0; j < methods_length; j++) {
      Method* old_method = old_methods[j];

      if (old_method->is_native() && old_method->has_native_function()) {
        Method* new_method = strip_and_search_for_new_native(old_method);
        if (new_method != NULL) {
          // Actually set the native function in the new method.
          // Redefine does not send events (except CFLH), certainly not this
          // behind the scenes re-registration.
          new_method->set_native_function(old_method->native_function(),
              !Method::native_bind_event_is_interesting);
        }
      }
    }
  }
};
2974 // Don't lose the association between a native method and its JNI function.
2975 void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle the_class) {
2976 TransferNativeFunctionRegistration transfer(the_class);
2977 transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
2978 transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
2979 }
2981 // Deoptimize all compiled code that depends on this class.
2982 //
2983 // If the can_redefine_classes capability is obtained in the onload
2984 // phase then the compiler has recorded all dependencies from startup.
2985 // In that case we need only deoptimize and throw away all compiled code
2986 // that depends on the class.
2987 //
2988 // If can_redefine_classes is obtained sometime after the onload
2989 // phase then the dependency information may be incomplete. In that case
2990 // the first call to RedefineClasses causes all compiled code to be
2991 // thrown away. As can_redefine_classes has been obtained then
2992 // all future compilations will record dependencies so second and
2993 // subsequent calls to RedefineClasses need only throw away code
2994 // that depends on the class.
2995 //
void VM_RedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) {
  // Must hold the Compile_lock or be at a safepoint: we are about to
  // invalidate compiled code.
  assert_locked_or_safepoint(Compile_lock);

  // All dependencies have been recorded from startup or this is a second or
  // subsequent use of RedefineClasses
  if (JvmtiExport::all_dependencies_are_recorded()) {
    // Targeted flush: only code with evol dependencies on k_h is discarded.
    Universe::flush_evol_dependents_on(k_h);
  } else {
    // Dependency info may be incomplete, so conservatively throw away
    // ALL compiled code (this only happens on the first redefine).
    CodeCache::mark_all_nmethods_for_deoptimization();

    ResourceMark rm(THREAD);
    DeoptimizationMarker dm;

    // Deoptimize all activations depending on marked nmethods
    Deoptimization::deoptimize_dependents();

    // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies)
    CodeCache::make_marked_nmethods_not_entrant();

    // From now on we know that the dependency information is complete
    JvmtiExport::set_all_dependencies_are_recorded(true);
  }
}
3020 void VM_RedefineClasses::compute_added_deleted_matching_methods() {
3021 Method* old_method;
3022 Method* new_method;
3024 _matching_old_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
3025 _matching_new_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
3026 _added_methods = NEW_RESOURCE_ARRAY(Method*, _new_methods->length());
3027 _deleted_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
3029 _matching_methods_length = 0;
3030 _deleted_methods_length = 0;
3031 _added_methods_length = 0;
3033 int nj = 0;
3034 int oj = 0;
3035 while (true) {
3036 if (oj >= _old_methods->length()) {
3037 if (nj >= _new_methods->length()) {
3038 break; // we've looked at everything, done
3039 }
3040 // New method at the end
3041 new_method = _new_methods->at(nj);
3042 _added_methods[_added_methods_length++] = new_method;
3043 ++nj;
3044 } else if (nj >= _new_methods->length()) {
3045 // Old method, at the end, is deleted
3046 old_method = _old_methods->at(oj);
3047 _deleted_methods[_deleted_methods_length++] = old_method;
3048 ++oj;
3049 } else {
3050 old_method = _old_methods->at(oj);
3051 new_method = _new_methods->at(nj);
3052 if (old_method->name() == new_method->name()) {
3053 if (old_method->signature() == new_method->signature()) {
3054 _matching_old_methods[_matching_methods_length ] = old_method;
3055 _matching_new_methods[_matching_methods_length++] = new_method;
3056 ++nj;
3057 ++oj;
3058 } else {
3059 // added overloaded have already been moved to the end,
3060 // so this is a deleted overloaded method
3061 _deleted_methods[_deleted_methods_length++] = old_method;
3062 ++oj;
3063 }
3064 } else { // names don't match
3065 if (old_method->name()->fast_compare(new_method->name()) > 0) {
3066 // new method
3067 _added_methods[_added_methods_length++] = new_method;
3068 ++nj;
3069 } else {
3070 // deleted method
3071 _deleted_methods[_deleted_methods_length++] = old_method;
3072 ++oj;
3073 }
3074 }
3075 }
3076 }
3077 assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity");
3078 assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity");
3079 }
3083 // Install the redefinition of a class:
3084 // - house keeping (flushing breakpoints and caches, deoptimizing
3085 // dependent compiled code)
3086 // - replacing parts in the_class with parts from scratch_class
3087 // - adding a weak reference to track the obsolete but interesting
3088 // parts of the_class
3089 // - adjusting constant pool caches and vtables in other classes
3090 // that refer to methods in the_class. These adjustments use the
3091 // SystemDictionary::classes_do() facility which only allows
3092 // a helper method to be specified. The interesting parameters
3093 // that we would like to pass to the helper method are saved in
3094 // static global fields in the VM operation.
3095 void VM_RedefineClasses::redefine_single_class(jclass the_jclass,
3096 Klass* scratch_class_oop, TRAPS) {
3098 HandleMark hm(THREAD); // make sure handles from this call are freed
3099 RC_TIMER_START(_timer_rsc_phase1);
3101 instanceKlassHandle scratch_class(scratch_class_oop);
3103 oop the_class_mirror = JNIHandles::resolve_non_null(the_jclass);
3104 Klass* the_class_oop = java_lang_Class::as_Klass(the_class_mirror);
3105 instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);
3107 // Remove all breakpoints in methods of this class
3108 JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
3109 jvmti_breakpoints.clearall_in_class_at_safepoint(the_class_oop);
3111 if (the_class_oop == Universe::reflect_invoke_cache()->klass()) {
3112 // We are redefining java.lang.reflect.Method. Method.invoke() is
3113 // cached and users of the cache care about each active version of
3114 // the method so we have to track this previous version.
3115 // Do this before methods get switched
3116 Universe::reflect_invoke_cache()->add_previous_version(
3117 the_class->method_with_idnum(Universe::reflect_invoke_cache()->method_idnum()));
3118 }
3120 // Deoptimize all compiled code that depends on this class
3121 flush_dependent_code(the_class, THREAD);
3123 _old_methods = the_class->methods();
3124 _new_methods = scratch_class->methods();
3125 _the_class_oop = the_class_oop;
3126 compute_added_deleted_matching_methods();
3127 update_jmethod_ids();
3129 // Attach new constant pool to the original klass. The original
3130 // klass still refers to the old constant pool (for now).
3131 scratch_class->constants()->set_pool_holder(the_class());
3133 #if 0
3134 // In theory, with constant pool merging in place we should be able
3135 // to save space by using the new, merged constant pool in place of
3136 // the old constant pool(s). By "pool(s)" I mean the constant pool in
3137 // the klass version we are replacing now and any constant pool(s) in
3138 // previous versions of klass. Nice theory, doesn't work in practice.
3139 // When this code is enabled, even simple programs throw NullPointer
3140 // exceptions. I'm guessing that this is caused by some constant pool
3141 // cache difference between the new, merged constant pool and the
3142 // constant pool that was just being used by the klass. I'm keeping
3143 // this code around to archive the idea, but the code has to remain
3144 // disabled for now.
3146 // Attach each old method to the new constant pool. This can be
3147 // done here since we are past the bytecode verification and
3148 // constant pool optimization phases.
3149 for (int i = _old_methods->length() - 1; i >= 0; i--) {
3150 Method* method = _old_methods->at(i);
3151 method->set_constants(scratch_class->constants());
3152 }
3154 {
3155 // walk all previous versions of the klass
3156 InstanceKlass *ik = (InstanceKlass *)the_class();
3157 PreviousVersionWalker pvw(ik);
3158 instanceKlassHandle ikh;
3159 do {
3160 ikh = pvw.next_previous_version();
3161 if (!ikh.is_null()) {
3162 ik = ikh();
3164 // attach previous version of klass to the new constant pool
3165 ik->set_constants(scratch_class->constants());
3167 // Attach each method in the previous version of klass to the
3168 // new constant pool
3169 Array<Method*>* prev_methods = ik->methods();
3170 for (int i = prev_methods->length() - 1; i >= 0; i--) {
3171 Method* method = prev_methods->at(i);
3172 method->set_constants(scratch_class->constants());
3173 }
3174 }
3175 } while (!ikh.is_null());
3176 }
3177 #endif
3179 // Replace methods and constantpool
3180 the_class->set_methods(_new_methods);
3181 scratch_class->set_methods(_old_methods); // To prevent potential GCing of the old methods,
3182 // and to be able to undo operation easily.
3184 ConstantPool* old_constants = the_class->constants();
3185 the_class->set_constants(scratch_class->constants());
3186 scratch_class->set_constants(old_constants); // See the previous comment.
3187 #if 0
3188 // We are swapping the guts of "the new class" with the guts of "the
3189 // class". Since the old constant pool has just been attached to "the
3190 // new class", it seems logical to set the pool holder in the old
3191 // constant pool also. However, doing this will change the observable
3192 // class hierarchy for any old methods that are still executing. A
3193 // method can query the identity of its "holder" and this query uses
3194 // the method's constant pool link to find the holder. The change in
3195 // holding class from "the class" to "the new class" can confuse
3196 // things.
3197 //
3198 // Setting the old constant pool's holder will also cause
3199 // verification done during vtable initialization below to fail.
3200 // During vtable initialization, the vtable's class is verified to be
3201 // a subtype of the method's holder. The vtable's class is "the
3202 // class" and the method's holder is gotten from the constant pool
3203 // link in the method itself. For "the class"'s directly implemented
3204 // methods, the method holder is "the class" itself (as gotten from
3205 // the new constant pool). The check works fine in this case. The
3206 // check also works fine for methods inherited from super classes.
3207 //
3208 // Miranda methods are a little more complicated. A miranda method is
3209 // provided by an interface when the class implementing the interface
3210 // does not provide its own method. These interfaces are implemented
3211 // internally as an InstanceKlass. These special instanceKlasses
3212 // share the constant pool of the class that "implements" the
3213 // interface. By sharing the constant pool, the method holder of a
3214 // miranda method is the class that "implements" the interface. In a
3215 // non-redefine situation, the subtype check works fine. However, if
3216 // the old constant pool's pool holder is modified, then the check
3217 // fails because there is no class hierarchy relationship between the
3218 // vtable's class and "the new class".
3220 old_constants->set_pool_holder(scratch_class());
3221 #endif
3223 // track which methods are EMCP for add_previous_version() call below
3224 BitMap emcp_methods(_old_methods->length());
3225 int emcp_method_count = 0;
3226 emcp_methods.clear(); // clears 0..(length() - 1)
3227 check_methods_and_mark_as_obsolete(&emcp_methods, &emcp_method_count);
3228 transfer_old_native_function_registrations(the_class);
3230 // The class file bytes from before any retransformable agents mucked
3231 // with them was cached on the scratch class, move to the_class.
3232 // Note: we still want to do this if nothing needed caching since it
3233 // should get cleared in the_class too.
3234 if (the_class->get_cached_class_file_bytes() == 0) {
3235 // the_class doesn't have a cache yet so copy it
3236 the_class->set_cached_class_file(
3237 scratch_class->get_cached_class_file_bytes(),
3238 scratch_class->get_cached_class_file_len());
3239 }
3240 #ifndef PRODUCT
3241 else {
3242 assert(the_class->get_cached_class_file_bytes() ==
3243 scratch_class->get_cached_class_file_bytes(), "cache ptrs must match");
3244 assert(the_class->get_cached_class_file_len() ==
3245 scratch_class->get_cached_class_file_len(), "cache lens must match");
3246 }
3247 #endif
3249 // Replace inner_classes
3250 Array<u2>* old_inner_classes = the_class->inner_classes();
3251 the_class->set_inner_classes(scratch_class->inner_classes());
3252 scratch_class->set_inner_classes(old_inner_classes);
3254 // Initialize the vtable and interface table after
3255 // methods have been rewritten
3256 {
3257 ResourceMark rm(THREAD);
3258 // no exception should happen here since we explicitly
3259 // do not check loader constraints.
3260 // compare_and_normalize_class_versions has already checked:
3261 // - classloaders unchanged, signatures unchanged
3262 // - all instanceKlasses for redefined classes reused & contents updated
3263 the_class->vtable()->initialize_vtable(false, THREAD);
3264 the_class->itable()->initialize_itable(false, THREAD);
3265 assert(!HAS_PENDING_EXCEPTION || (THREAD->pending_exception()->is_a(SystemDictionary::ThreadDeath_klass())), "redefine exception");
3266 }
3268 // Leave arrays of jmethodIDs and itable index cache unchanged
3270 // Copy the "source file name" attribute from new class version
3271 the_class->set_source_file_name(scratch_class->source_file_name());
3273 // Copy the "source debug extension" attribute from new class version
3274 the_class->set_source_debug_extension(
3275 scratch_class->source_debug_extension(),
3276 scratch_class->source_debug_extension() == NULL ? 0 :
3277 (int)strlen(scratch_class->source_debug_extension()));
3279 // Use of javac -g could be different in the old and the new
3280 if (scratch_class->access_flags().has_localvariable_table() !=
3281 the_class->access_flags().has_localvariable_table()) {
3283 AccessFlags flags = the_class->access_flags();
3284 if (scratch_class->access_flags().has_localvariable_table()) {
3285 flags.set_has_localvariable_table();
3286 } else {
3287 flags.clear_has_localvariable_table();
3288 }
3289 the_class->set_access_flags(flags);
3290 }
3292 // Since there is currently no rewriting of type annotations indexes
3293 // into the CP, we null out type annotations on scratch_class before
3294 // we swap annotations with the_class rather than facing the
3295 // possibility of shipping annotations with broken indexes to
3296 // Java-land.
3297 Annotations* new_annotations = scratch_class->annotations();
3298 if (new_annotations != NULL) {
3299 Annotations* new_type_annotations = new_annotations->type_annotations();
3300 if (new_type_annotations != NULL) {
3301 MetadataFactory::free_metadata(scratch_class->class_loader_data(), new_type_annotations);
3302 new_annotations->set_type_annotations(NULL);
3303 }
3304 }
3305 // Swap annotation fields values
3306 Annotations* old_annotations = the_class->annotations();
3307 the_class->set_annotations(scratch_class->annotations());
3308 scratch_class->set_annotations(old_annotations);
3310 // Replace minor version number of class file
3311 u2 old_minor_version = the_class->minor_version();
3312 the_class->set_minor_version(scratch_class->minor_version());
3313 scratch_class->set_minor_version(old_minor_version);
3315 // Replace major version number of class file
3316 u2 old_major_version = the_class->major_version();
3317 the_class->set_major_version(scratch_class->major_version());
3318 scratch_class->set_major_version(old_major_version);
3320 // Replace CP indexes for class and name+type of enclosing method
3321 u2 old_class_idx = the_class->enclosing_method_class_index();
3322 u2 old_method_idx = the_class->enclosing_method_method_index();
3323 the_class->set_enclosing_method_indices(
3324 scratch_class->enclosing_method_class_index(),
3325 scratch_class->enclosing_method_method_index());
3326 scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx);
3328 // keep track of previous versions of this class
3329 the_class->add_previous_version(scratch_class, &emcp_methods,
3330 emcp_method_count);
3332 RC_TIMER_STOP(_timer_rsc_phase1);
3333 RC_TIMER_START(_timer_rsc_phase2);
3335 // Adjust constantpool caches and vtables for all classes
3336 // that reference methods of the evolved class.
3337 SystemDictionary::classes_do(adjust_cpool_cache_and_vtable, THREAD);
3339 // Fix Resolution Error table also to remove old constant pools
3340 SystemDictionary::delete_resolution_error(old_constants);
3342 if (the_class->oop_map_cache() != NULL) {
3343 // Flush references to any obsolete methods from the oop map cache
3344 // so that obsolete methods are not pinned.
3345 the_class->oop_map_cache()->flush_obsolete_entries();
3346 }
3348 // increment the classRedefinedCount field in the_class and in any
3349 // direct and indirect subclasses of the_class
3350 increment_class_counter((InstanceKlass *)the_class(), THREAD);
3352 // RC_TRACE macro has an embedded ResourceMark
3353 RC_TRACE_WITH_THREAD(0x00000001, THREAD,
3354 ("redefined name=%s, count=%d (avail_mem=" UINT64_FORMAT "K)",
3355 the_class->external_name(),
3356 java_lang_Class::classRedefinedCount(the_class_mirror),
3357 os::available_memory() >> 10));
3359 RC_TIMER_STOP(_timer_rsc_phase2);
3360 } // end redefine_single_class()
3363 // Increment the classRedefinedCount field in the specific InstanceKlass
3364 // and in all direct and indirect subclasses.
3365 void VM_RedefineClasses::increment_class_counter(InstanceKlass *ik, TRAPS) {
3366 oop class_mirror = ik->java_mirror();
3367 Klass* class_oop = java_lang_Class::as_Klass(class_mirror);
3368 int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
3369 java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
3371 if (class_oop != _the_class_oop) {
3372 // _the_class_oop count is printed at end of redefine_single_class()
3373 RC_TRACE_WITH_THREAD(0x00000008, THREAD,
3374 ("updated count in subclass=%s to %d", ik->external_name(), new_count));
3375 }
3377 for (Klass *subk = ik->subklass(); subk != NULL;
3378 subk = subk->next_sibling()) {
3379 if (subk->oop_is_instance()) {
3380 // Only update instanceKlasses
3381 InstanceKlass *subik = (InstanceKlass*)subk;
3382 // recursively do subclasses of the current subclass
3383 increment_class_counter(subik, THREAD);
3384 }
3385 }
3386 }
3388 #ifndef PRODUCT
3389 void VM_RedefineClasses::check_class(Klass* k_oop,
3390 ClassLoaderData* initiating_loader,
3391 TRAPS) {
3392 Klass *k = k_oop;
3393 if (k->oop_is_instance()) {
3394 HandleMark hm(THREAD);
3395 InstanceKlass *ik = (InstanceKlass *) k;
3397 if (ik->vtable_length() > 0) {
3398 ResourceMark rm(THREAD);
3399 if (!ik->vtable()->check_no_old_entries()) {
3400 tty->print_cr("klassVtable::check_no_old_entries failure -- OLD method found -- class: %s", ik->signature_name());
3401 ik->vtable()->dump_vtable();
3402 assert(false, "OLD method found");
3403 }
3404 }
3405 if (ik->itable_length() > 0) {
3406 ResourceMark rm(THREAD);
3407 if (!ik->itable()->check_no_old_entries()) {
3408 tty->print_cr("klassItable::check_no_old_entries failure -- OLD method found -- class: %s", ik->signature_name());
3409 assert(false, "OLD method found");
3410 }
3411 }
3412 // Check that the constant pool cache has no deleted entries.
3413 if (ik->constants() != NULL &&
3414 ik->constants()->cache() != NULL &&
3415 !ik->constants()->cache()->check_no_old_entries()) {
3416 tty->print_cr("klassVtable::check_no_old_entries failure -- OLD method found -- class: %s", ik->signature_name());
3417 assert(false, "OLD method found");
3418 }
3419 }
3420 }
3422 void VM_RedefineClasses::dump_methods() {
3423 int j;
3424 tty->print_cr("_old_methods --");
3425 for (j = 0; j < _old_methods->length(); ++j) {
3426 Method* m = _old_methods->at(j);
3427 tty->print("%4d (%5d) ", j, m->vtable_index());
3428 m->access_flags().print_on(tty);
3429 tty->print(" -- ");
3430 m->print_name(tty);
3431 tty->cr();
3432 }
3433 tty->print_cr("_new_methods --");
3434 for (j = 0; j < _new_methods->length(); ++j) {
3435 Method* m = _new_methods->at(j);
3436 tty->print("%4d (%5d) ", j, m->vtable_index());
3437 m->access_flags().print_on(tty);
3438 tty->print(" -- ");
3439 m->print_name(tty);
3440 tty->cr();
3441 }
3442 tty->print_cr("_matching_(old/new)_methods --");
3443 for (j = 0; j < _matching_methods_length; ++j) {
3444 Method* m = _matching_old_methods[j];
3445 tty->print("%4d (%5d) ", j, m->vtable_index());
3446 m->access_flags().print_on(tty);
3447 tty->print(" -- ");
3448 m->print_name(tty);
3449 tty->cr();
3450 m = _matching_new_methods[j];
3451 tty->print(" (%5d) ", m->vtable_index());
3452 m->access_flags().print_on(tty);
3453 tty->cr();
3454 }
3455 tty->print_cr("_deleted_methods --");
3456 for (j = 0; j < _deleted_methods_length; ++j) {
3457 Method* m = _deleted_methods[j];
3458 tty->print("%4d (%5d) ", j, m->vtable_index());
3459 m->access_flags().print_on(tty);
3460 tty->print(" -- ");
3461 m->print_name(tty);
3462 tty->cr();
3463 }
3464 tty->print_cr("_added_methods --");
3465 for (j = 0; j < _added_methods_length; ++j) {
3466 Method* m = _added_methods[j];
3467 tty->print("%4d (%5d) ", j, m->vtable_index());
3468 m->access_flags().print_on(tty);
3469 tty->print(" -- ");
3470 m->print_name(tty);
3471 tty->cr();
3472 }
3473 }
3474 #endif