src/share/vm/prims/jvmtiRedefineClasses.cpp

changeset 435
a61af66fc99e
child 953
0af8b0718fc9
child 1044
ea20d7ce26b0
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp	Sat Dec 01 00:00:00 2007 +0000
     1.3 @@ -0,0 +1,3398 @@
     1.4 +/*
     1.5 + * Copyright 2003-2007 Sun Microsystems, Inc.  All Rights Reserved.
     1.6 + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     1.7 + *
     1.8 + * This code is free software; you can redistribute it and/or modify it
     1.9 + * under the terms of the GNU General Public License version 2 only, as
    1.10 + * published by the Free Software Foundation.
    1.11 + *
    1.12 + * This code is distributed in the hope that it will be useful, but WITHOUT
    1.13 + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    1.14 + * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    1.15 + * version 2 for more details (a copy is included in the LICENSE file that
    1.16 + * accompanied this code).
    1.17 + *
    1.18 + * You should have received a copy of the GNU General Public License version
    1.19 + * 2 along with this work; if not, write to the Free Software Foundation,
    1.20 + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    1.21 + *
    1.22 + * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
    1.23 + * CA 95054 USA or visit www.sun.com if you need additional information or
    1.24 + * have any questions.
    1.25 + *
    1.26 + */
    1.27 +
    1.28 +# include "incls/_precompiled.incl"
    1.29 +# include "incls/_jvmtiRedefineClasses.cpp.incl"
    1.30 +
    1.31 +
// Scratch state used while redefining a single class. These statics are
// (re)initialized for each class processed by the current redefine
// operation. NOTE(review): static storage assumes no two redefine
// operations run concurrently — presumably guaranteed by the VM
// operation mechanism; confirm against VM_Operation scheduling.
objArrayOop VM_RedefineClasses::_old_methods = NULL;
objArrayOop VM_RedefineClasses::_new_methods = NULL;
methodOop*  VM_RedefineClasses::_matching_old_methods = NULL;  // old methods with a match in the new class
methodOop*  VM_RedefineClasses::_matching_new_methods = NULL;  // new methods paired with _matching_old_methods
methodOop*  VM_RedefineClasses::_deleted_methods      = NULL;  // methods only in the old class
methodOop*  VM_RedefineClasses::_added_methods        = NULL;  // methods only in the new class
int         VM_RedefineClasses::_matching_methods_length = 0;
int         VM_RedefineClasses::_deleted_methods_length  = 0;
int         VM_RedefineClasses::_added_methods_length    = 0;
klassOop    VM_RedefineClasses::_the_class_oop = NULL;         // the class currently being redefined
    1.42 +
    1.43 +
    1.44 +VM_RedefineClasses::VM_RedefineClasses(jint class_count,
    1.45 +                                       const jvmtiClassDefinition *class_defs,
    1.46 +                                       JvmtiClassLoadKind class_load_kind) {
    1.47 +  _class_count = class_count;
    1.48 +  _class_defs = class_defs;
    1.49 +  _class_load_kind = class_load_kind;
    1.50 +  _res = JVMTI_ERROR_NONE;
    1.51 +}
    1.52 +
// Runs in the requesting JavaThread *before* the safepoint. Validates the
// request, then parses/loads the new class versions (which requires a Java
// thread and therefore cannot happen in doit()). Returns false to abort
// the VM operation; _res carries the JVMTI error code back to the caller.
bool VM_RedefineClasses::doit_prologue() {
  if (_class_count == 0) {
    // Nothing to redefine; trivially successful.
    _res = JVMTI_ERROR_NONE;
    return false;
  }
  if (_class_defs == NULL) {
    _res = JVMTI_ERROR_NULL_POINTER;
    return false;
  }
  // Per-entry sanity checks. The specific error code depends on which
  // field is bad, so the check order below is part of the contract.
  for (int i = 0; i < _class_count; i++) {
    if (_class_defs[i].klass == NULL) {
      _res = JVMTI_ERROR_INVALID_CLASS;
      return false;
    }
    if (_class_defs[i].class_byte_count == 0) {
      _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
      return false;
    }
    if (_class_defs[i].class_bytes == NULL) {
      _res = JVMTI_ERROR_NULL_POINTER;
      return false;
    }
  }

  // Start timer after all the sanity checks; not quite accurate, but
  // better than adding a bunch of stop() calls.
  RC_TIMER_START(_timer_vm_op_prologue);

  // We first load new class versions in the prologue, because somewhere down the
  // call chain it is required that the current thread is a Java thread.
  _res = load_new_class_versions(Thread::current());
  if (_res != JVMTI_ERROR_NONE) {
    // Free os::malloc allocated memory in load_new_class_version.
    os::free(_scratch_classes);
    RC_TIMER_STOP(_timer_vm_op_prologue);
    return false;
  }

  RC_TIMER_STOP(_timer_vm_op_prologue);
  return true;
}
    1.94 +
// The safepoint phase of the operation: installs each previously loaded
// scratch class (from doit_prologue) over its corresponding old class,
// then invalidates dependent state across the VM.
void VM_RedefineClasses::doit() {
  Thread *thread = Thread::current();

  if (UseSharedSpaces) {
    // Sharing is enabled so we remap the shared readonly space to
    // shared readwrite, private just in case we need to redefine
    // a shared class. We do the remap during the doit() phase of
    // the safepoint to be safer.
    if (!CompactingPermGenGen::remap_shared_readonly_as_readwrite()) {
      RC_TRACE_WITH_THREAD(0x00000001, thread,
        ("failed to remap shared readonly space to readwrite, private"));
      _res = JVMTI_ERROR_INTERNAL;
      return;
    }
  }

  // Swap in each new class version. Scratch classes were allocated by
  // load_new_class_versions() in the prologue, one per requested class.
  for (int i = 0; i < _class_count; i++) {
    redefine_single_class(_class_defs[i].klass, _scratch_classes[i], thread);
  }
  // Disable any dependent concurrent compilations
  SystemDictionary::notice_modification();

  // Set flag indicating that some invariants are no longer true.
  // See jvmtiExport.hpp for detailed explanation.
  JvmtiExport::set_has_redefined_a_class();

#ifdef ASSERT
  // Debug-only consistency walk over every loaded class.
  SystemDictionary::classes_do(check_class, thread);
#endif
}
   1.125 +
// Runs in the requesting JavaThread after the safepoint: releases the
// scratch-class array and optionally reports phase timings via RC_TRACE.
void VM_RedefineClasses::doit_epilogue() {
  // Free os::malloc allocated memory.
  // The memory allocated in redefine will be free'ed in next VM operation.
  os::free(_scratch_classes);

  if (RC_TRACE_ENABLED(0x00000004)) {
    // Used to have separate timers for "doit" and "all", but the timer
    // overhead skewed the measurements.
    jlong doit_time = _timer_rsc_phase1.milliseconds() +
                      _timer_rsc_phase2.milliseconds();
    jlong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;

    RC_TRACE(0x00000004, ("vm_op: all=" UINT64_FORMAT
      "  prologue=" UINT64_FORMAT "  doit=" UINT64_FORMAT, all_time,
      _timer_vm_op_prologue.milliseconds(), doit_time));
    RC_TRACE(0x00000004,
      ("redefine_single_class: phase1=" UINT64_FORMAT "  phase2=" UINT64_FORMAT,
       _timer_rsc_phase1.milliseconds(), _timer_rsc_phase2.milliseconds()));
  }
}
   1.146 +
   1.147 +bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
   1.148 +  // classes for primitives cannot be redefined
   1.149 +  if (java_lang_Class::is_primitive(klass_mirror)) {
   1.150 +    return false;
   1.151 +  }
   1.152 +  klassOop the_class_oop = java_lang_Class::as_klassOop(klass_mirror);
   1.153 +  // classes for arrays cannot be redefined
   1.154 +  if (the_class_oop == NULL || !Klass::cast(the_class_oop)->oop_is_instance()) {
   1.155 +    return false;
   1.156 +  }
   1.157 +  return true;
   1.158 +}
   1.159 +
// Append the current entry at scratch_i in scratch_cp to *merge_cp_p
// where the end of *merge_cp_p is specified by *merge_cp_length_p. For
// direct CP entries, there is just the current entry to append. For
// indirect and double-indirect CP entries, there are zero or more
// referenced CP entries along with the current entry to append.
// Indirect and double-indirect CP entries are handled by recursive
// calls to append_entry() as needed. The referenced CP entries are
// always appended to *merge_cp_p before the referee CP entry. These
// referenced CP entries may already exist in *merge_cp_p in which case
// there is nothing extra to append and only the current entry is
// appended.
//
// Whenever the appended copy lands at a different index than the
// original scratch_cp index, map_index() records the old->new index
// mapping so later passes can rewrite references.
void VM_RedefineClasses::append_entry(constantPoolHandle scratch_cp,
       int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p,
       TRAPS) {

  // append is different depending on entry tag type
  switch (scratch_cp->tag_at(scratch_i).value()) {

    // The old verifier is implemented outside the VM. It loads classes,
    // but does not resolve constant pool entries directly so we never
    // see Class entries here with the old verifier. Similarly the old
    // verifier does not like Class entries in the input constant pool.
    // The split-verifier is implemented in the VM so it can optionally
    // and directly resolve constant pool entries to load classes. The
    // split-verifier can accept either Class entries or UnresolvedClass
    // entries in the input constant pool. We revert the appended copy
    // back to UnresolvedClass so that either verifier will be happy
    // with the constant pool entry.
    case JVM_CONSTANT_Class:
    {
      // revert the copy to JVM_CONSTANT_UnresolvedClass
      (*merge_cp_p)->unresolved_klass_at_put(*merge_cp_length_p,
        scratch_cp->klass_name_at(scratch_i));

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // these are direct CP entries so they can be directly appended,
    // but double and long take two constant pool entries
    case JVM_CONSTANT_Double:  // fall through
    case JVM_CONSTANT_Long:
    {
      scratch_cp->copy_entry_to(scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      // longs and doubles occupy two constant pool slots
      (*merge_cp_length_p) += 2;
    } break;

    // these are direct CP entries so they can be directly appended
    case JVM_CONSTANT_Float:   // fall through
    case JVM_CONSTANT_Integer: // fall through
    case JVM_CONSTANT_Utf8:    // fall through

    // This was an indirect CP entry, but it has been changed into
    // an interned string so this entry can be directly appended.
    case JVM_CONSTANT_String:      // fall through

    // These were indirect CP entries, but they have been changed into
    // symbolOops so these entries can be directly appended.
    case JVM_CONSTANT_UnresolvedClass:  // fall through
    case JVM_CONSTANT_UnresolvedString:
    {
      scratch_cp->copy_entry_to(scratch_i, *merge_cp_p, *merge_cp_length_p,
        THREAD);

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is an indirect CP entry so it needs special handling
    case JVM_CONSTANT_NameAndType:
    {
      // Resolve the name reference first: either it already matches at
      // the same index in *merge_cp_p, matches somewhere else, or must
      // itself be appended (recursively) before this entry.
      int name_ref_i = scratch_cp->name_ref_index_at(scratch_i);
      int new_name_ref_i = 0;
      bool match = (name_ref_i < *merge_cp_length_p) &&
        scratch_cp->compare_entry_to(name_ref_i, *merge_cp_p, name_ref_i,
          THREAD);
      if (!match) {
        // forward reference in *merge_cp_p or not a direct match

        int found_i = scratch_cp->find_matching_entry(name_ref_i, *merge_cp_p,
          THREAD);
        if (found_i != 0) {
          guarantee(found_i != name_ref_i,
            "compare_entry_to() and find_matching_entry() do not agree");

          // Found a matching entry somewhere else in *merge_cp_p so
          // just need a mapping entry.
          new_name_ref_i = found_i;
          map_index(scratch_cp, name_ref_i, found_i);
        } else {
          // no match found so we have to append this entry to *merge_cp_p
          append_entry(scratch_cp, name_ref_i, merge_cp_p, merge_cp_length_p,
            THREAD);
          // The above call to append_entry() can only append one entry
          // so the post call query of *merge_cp_length_p is only for
          // the sake of consistency.
          new_name_ref_i = *merge_cp_length_p - 1;
        }
      }

      // Resolve the signature reference the same way.
      int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i);
      int new_signature_ref_i = 0;
      match = (signature_ref_i < *merge_cp_length_p) &&
        scratch_cp->compare_entry_to(signature_ref_i, *merge_cp_p,
          signature_ref_i, THREAD);
      if (!match) {
        // forward reference in *merge_cp_p or not a direct match

        int found_i = scratch_cp->find_matching_entry(signature_ref_i,
          *merge_cp_p, THREAD);
        if (found_i != 0) {
          guarantee(found_i != signature_ref_i,
            "compare_entry_to() and find_matching_entry() do not agree");

          // Found a matching entry somewhere else in *merge_cp_p so
          // just need a mapping entry.
          new_signature_ref_i = found_i;
          map_index(scratch_cp, signature_ref_i, found_i);
        } else {
          // no match found so we have to append this entry to *merge_cp_p
          append_entry(scratch_cp, signature_ref_i, merge_cp_p,
            merge_cp_length_p, THREAD);
          // The above call to append_entry() can only append one entry
          // so the post call query of *merge_cp_length_p is only for
          // the sake of consistency.
          new_signature_ref_i = *merge_cp_length_p - 1;
        }
      }

      // If the referenced entries already exist in *merge_cp_p, then
      // both new_name_ref_i and new_signature_ref_i will both be 0.
      // In that case, all we are appending is the current entry.
      if (new_name_ref_i == 0) {
        new_name_ref_i = name_ref_i;
      } else {
        RC_TRACE(0x00080000,
          ("NameAndType entry@%d name_ref_index change: %d to %d",
          *merge_cp_length_p, name_ref_i, new_name_ref_i));
      }
      if (new_signature_ref_i == 0) {
        new_signature_ref_i = signature_ref_i;
      } else {
        RC_TRACE(0x00080000,
          ("NameAndType entry@%d signature_ref_index change: %d to %d",
          *merge_cp_length_p, signature_ref_i, new_signature_ref_i));
      }

      (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
        new_name_ref_i, new_signature_ref_i);
      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // this is a double-indirect CP entry so it needs special handling
    case JVM_CONSTANT_Fieldref:           // fall through
    case JVM_CONSTANT_InterfaceMethodref: // fall through
    case JVM_CONSTANT_Methodref:
    {
      // Resolve the class reference (same three-way logic as the
      // NameAndType case above: same-index match, elsewhere match, or
      // recursive append).
      int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i);
      int new_klass_ref_i = 0;
      bool match = (klass_ref_i < *merge_cp_length_p) &&
        scratch_cp->compare_entry_to(klass_ref_i, *merge_cp_p, klass_ref_i,
          THREAD);
      if (!match) {
        // forward reference in *merge_cp_p or not a direct match

        int found_i = scratch_cp->find_matching_entry(klass_ref_i, *merge_cp_p,
          THREAD);
        if (found_i != 0) {
          guarantee(found_i != klass_ref_i,
            "compare_entry_to() and find_matching_entry() do not agree");

          // Found a matching entry somewhere else in *merge_cp_p so
          // just need a mapping entry.
          new_klass_ref_i = found_i;
          map_index(scratch_cp, klass_ref_i, found_i);
        } else {
          // no match found so we have to append this entry to *merge_cp_p
          append_entry(scratch_cp, klass_ref_i, merge_cp_p, merge_cp_length_p,
            THREAD);
          // The above call to append_entry() can only append one entry
          // so the post call query of *merge_cp_length_p is only for
          // the sake of consistency. Without the optimization where we
          // use JVM_CONSTANT_UnresolvedClass, then up to two entries
          // could be appended.
          new_klass_ref_i = *merge_cp_length_p - 1;
        }
      }

      // Resolve the NameAndType reference; note that appending it may
      // itself append up to two referenced entries first.
      int name_and_type_ref_i =
        scratch_cp->uncached_name_and_type_ref_index_at(scratch_i);
      int new_name_and_type_ref_i = 0;
      match = (name_and_type_ref_i < *merge_cp_length_p) &&
        scratch_cp->compare_entry_to(name_and_type_ref_i, *merge_cp_p,
          name_and_type_ref_i, THREAD);
      if (!match) {
        // forward reference in *merge_cp_p or not a direct match

        int found_i = scratch_cp->find_matching_entry(name_and_type_ref_i,
          *merge_cp_p, THREAD);
        if (found_i != 0) {
          guarantee(found_i != name_and_type_ref_i,
            "compare_entry_to() and find_matching_entry() do not agree");

          // Found a matching entry somewhere else in *merge_cp_p so
          // just need a mapping entry.
          new_name_and_type_ref_i = found_i;
          map_index(scratch_cp, name_and_type_ref_i, found_i);
        } else {
          // no match found so we have to append this entry to *merge_cp_p
          append_entry(scratch_cp, name_and_type_ref_i, merge_cp_p,
            merge_cp_length_p, THREAD);
          // The above call to append_entry() can append more than
          // one entry so the post call query of *merge_cp_length_p
          // is required in order to get the right index for the
          // JVM_CONSTANT_NameAndType entry.
          new_name_and_type_ref_i = *merge_cp_length_p - 1;
        }
      }

      // If the referenced entries already exist in *merge_cp_p, then
      // both new_klass_ref_i and new_name_and_type_ref_i will both be
      // 0. In that case, all we are appending is the current entry.
      if (new_klass_ref_i == 0) {
        new_klass_ref_i = klass_ref_i;
      }
      if (new_name_and_type_ref_i == 0) {
        new_name_and_type_ref_i = name_and_type_ref_i;
      }

      const char *entry_name;
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Fieldref:
        entry_name = "Fieldref";
        (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_InterfaceMethodref:
        entry_name = "IFMethodref";
        (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p,
          new_klass_ref_i, new_name_and_type_ref_i);
        break;
      case JVM_CONSTANT_Methodref:
        entry_name = "Methodref";
        (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i,
          new_name_and_type_ref_i);
        break;
      default:
        guarantee(false, "bad switch");
        break;
      }

      if (klass_ref_i != new_klass_ref_i) {
        RC_TRACE(0x00080000, ("%s entry@%d class_index changed: %d to %d",
          entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i));
      }
      if (name_and_type_ref_i != new_name_and_type_ref_i) {
        RC_TRACE(0x00080000,
          ("%s entry@%d name_and_type_index changed: %d to %d",
          entry_name, *merge_cp_length_p, name_and_type_ref_i,
          new_name_and_type_ref_i));
      }

      if (scratch_i != *merge_cp_length_p) {
        // The new entry in *merge_cp_p is at a different index than
        // the new entry in scratch_cp so we need to map the index values.
        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
      }
      (*merge_cp_length_p)++;
    } break;

    // At this stage, Class or UnresolvedClass could be here, but not
    // ClassIndex
    case JVM_CONSTANT_ClassIndex: // fall through

    // Invalid is used as the tag for the second constant pool entry
    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
    // not be seen by itself.
    case JVM_CONSTANT_Invalid: // fall through

    // At this stage, String or UnresolvedString could be here, but not
    // StringIndex
    case JVM_CONSTANT_StringIndex: // fall through

    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be
    // here
    case JVM_CONSTANT_UnresolvedClassInError: // fall through

    default:
    {
      // leave a breadcrumb
      // (the local is intentionally unused; it makes the offending tag
      // value visible in a debugger or crash dump before we abort)
      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
      ShouldNotReachHere();
    } break;
  } // end switch tag value
} // end append_entry()
   1.475 +
   1.476 +
   1.477 +void VM_RedefineClasses::swap_all_method_annotations(int i, int j, instanceKlassHandle scratch_class) {
   1.478 +  typeArrayOop save;
   1.479 +
   1.480 +  save = scratch_class->get_method_annotations_of(i);
   1.481 +  scratch_class->set_method_annotations_of(i, scratch_class->get_method_annotations_of(j));
   1.482 +  scratch_class->set_method_annotations_of(j, save);
   1.483 +
   1.484 +  save = scratch_class->get_method_parameter_annotations_of(i);
   1.485 +  scratch_class->set_method_parameter_annotations_of(i, scratch_class->get_method_parameter_annotations_of(j));
   1.486 +  scratch_class->set_method_parameter_annotations_of(j, save);
   1.487 +
   1.488 +  save = scratch_class->get_method_default_annotations_of(i);
   1.489 +  scratch_class->set_method_default_annotations_of(i, scratch_class->get_method_default_annotations_of(j));
   1.490 +  scratch_class->set_method_default_annotations_of(j, save);
   1.491 +}
   1.492 +
   1.493 +
   1.494 +jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
   1.495 +             instanceKlassHandle the_class,
   1.496 +             instanceKlassHandle scratch_class) {
   1.497 +  int i;
   1.498 +
   1.499 +  // Check superclasses, or rather their names, since superclasses themselves can be
   1.500 +  // requested to replace.
   1.501 +  // Check for NULL superclass first since this might be java.lang.Object
   1.502 +  if (the_class->super() != scratch_class->super() &&
   1.503 +      (the_class->super() == NULL || scratch_class->super() == NULL ||
   1.504 +       Klass::cast(the_class->super())->name() !=
   1.505 +       Klass::cast(scratch_class->super())->name())) {
   1.506 +    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
   1.507 +  }
   1.508 +
   1.509 +  // Check if the number, names and order of directly implemented interfaces are the same.
   1.510 +  // I think in principle we should just check if the sets of names of directly implemented
   1.511 +  // interfaces are the same, i.e. the order of declaration (which, however, if changed in the
   1.512 +  // .java file, also changes in .class file) should not matter. However, comparing sets is
   1.513 +  // technically a bit more difficult, and, more importantly, I am not sure at present that the
   1.514 +  // order of interfaces does not matter on the implementation level, i.e. that the VM does not
   1.515 +  // rely on it somewhere.
   1.516 +  objArrayOop k_interfaces = the_class->local_interfaces();
   1.517 +  objArrayOop k_new_interfaces = scratch_class->local_interfaces();
   1.518 +  int n_intfs = k_interfaces->length();
   1.519 +  if (n_intfs != k_new_interfaces->length()) {
   1.520 +    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
   1.521 +  }
   1.522 +  for (i = 0; i < n_intfs; i++) {
   1.523 +    if (Klass::cast((klassOop) k_interfaces->obj_at(i))->name() !=
   1.524 +        Klass::cast((klassOop) k_new_interfaces->obj_at(i))->name()) {
   1.525 +      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
   1.526 +    }
   1.527 +  }
   1.528 +
   1.529 +  // Check whether class is in the error init state.
   1.530 +  if (the_class->is_in_error_state()) {
   1.531 +    // TBD #5057930: special error code is needed in 1.6
   1.532 +    return JVMTI_ERROR_INVALID_CLASS;
   1.533 +  }
   1.534 +
   1.535 +  // Check whether class modifiers are the same.
   1.536 +  jushort old_flags = (jushort) the_class->access_flags().get_flags();
   1.537 +  jushort new_flags = (jushort) scratch_class->access_flags().get_flags();
   1.538 +  if (old_flags != new_flags) {
   1.539 +    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_MODIFIERS_CHANGED;
   1.540 +  }
   1.541 +
   1.542 +  // Check if the number, names, types and order of fields declared in these classes
   1.543 +  // are the same.
   1.544 +  typeArrayOop k_old_fields = the_class->fields();
   1.545 +  typeArrayOop k_new_fields = scratch_class->fields();
   1.546 +  int n_fields = k_old_fields->length();
   1.547 +  if (n_fields != k_new_fields->length()) {
   1.548 +    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
   1.549 +  }
   1.550 +
   1.551 +  for (i = 0; i < n_fields; i += instanceKlass::next_offset) {
   1.552 +    // access
   1.553 +    old_flags = k_old_fields->ushort_at(i + instanceKlass::access_flags_offset);
   1.554 +    new_flags = k_new_fields->ushort_at(i + instanceKlass::access_flags_offset);
   1.555 +    if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
   1.556 +      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
   1.557 +    }
   1.558 +    // offset
   1.559 +    if (k_old_fields->short_at(i + instanceKlass::low_offset) !=
   1.560 +        k_new_fields->short_at(i + instanceKlass::low_offset) ||
   1.561 +        k_old_fields->short_at(i + instanceKlass::high_offset) !=
   1.562 +        k_new_fields->short_at(i + instanceKlass::high_offset)) {
   1.563 +      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
   1.564 +    }
   1.565 +    // name and signature
   1.566 +    jshort name_index = k_old_fields->short_at(i + instanceKlass::name_index_offset);
   1.567 +    jshort sig_index = k_old_fields->short_at(i +instanceKlass::signature_index_offset);
   1.568 +    symbolOop name_sym1 = the_class->constants()->symbol_at(name_index);
   1.569 +    symbolOop sig_sym1 = the_class->constants()->symbol_at(sig_index);
   1.570 +    name_index = k_new_fields->short_at(i + instanceKlass::name_index_offset);
   1.571 +    sig_index = k_new_fields->short_at(i + instanceKlass::signature_index_offset);
   1.572 +    symbolOop name_sym2 = scratch_class->constants()->symbol_at(name_index);
   1.573 +    symbolOop sig_sym2 = scratch_class->constants()->symbol_at(sig_index);
   1.574 +    if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
   1.575 +      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
   1.576 +    }
   1.577 +  }
   1.578 +
   1.579 +  // Do a parallel walk through the old and new methods. Detect
   1.580 +  // cases where they match (exist in both), have been added in
   1.581 +  // the new methods, or have been deleted (exist only in the
   1.582 +  // old methods).  The class file parser places methods in order
   1.583 +  // by method name, but does not order overloaded methods by
   1.584 +  // signature.  In order to determine what fate befell the methods,
   1.585 +  // this code places the overloaded new methods that have matching
   1.586 +  // old methods in the same order as the old methods and places
   1.587 +  // new overloaded methods at the end of overloaded methods of
   1.588 +  // that name. The code for this order normalization is adapted
   1.589 +  // from the algorithm used in instanceKlass::find_method().
   1.590 +  // Since we are swapping out of order entries as we find them,
   1.591 +  // we only have to search forward through the overloaded methods.
   1.592 +  // Methods which are added and have the same name as an existing
   1.593 +  // method (but different signature) will be put at the end of
   1.594 +  // the methods with that name, and the name mismatch code will
   1.595 +  // handle them.
   1.596 +  objArrayHandle k_old_methods(the_class->methods());
   1.597 +  objArrayHandle k_new_methods(scratch_class->methods());
   1.598 +  int n_old_methods = k_old_methods->length();
   1.599 +  int n_new_methods = k_new_methods->length();
   1.600 +
   1.601 +  int ni = 0;
   1.602 +  int oi = 0;
   1.603 +  while (true) {
   1.604 +    methodOop k_old_method;
   1.605 +    methodOop k_new_method;
   1.606 +    enum { matched, added, deleted, undetermined } method_was = undetermined;
   1.607 +
   1.608 +    if (oi >= n_old_methods) {
   1.609 +      if (ni >= n_new_methods) {
   1.610 +        break; // we've looked at everything, done
   1.611 +      }
   1.612 +      // New method at the end
   1.613 +      k_new_method = (methodOop) k_new_methods->obj_at(ni);
   1.614 +      method_was = added;
   1.615 +    } else if (ni >= n_new_methods) {
   1.616 +      // Old method, at the end, is deleted
   1.617 +      k_old_method = (methodOop) k_old_methods->obj_at(oi);
   1.618 +      method_was = deleted;
   1.619 +    } else {
   1.620 +      // There are more methods in both the old and new lists
   1.621 +      k_old_method = (methodOop) k_old_methods->obj_at(oi);
   1.622 +      k_new_method = (methodOop) k_new_methods->obj_at(ni);
   1.623 +      if (k_old_method->name() != k_new_method->name()) {
   1.624 +        // Methods are sorted by method name, so a mismatch means added
   1.625 +        // or deleted
   1.626 +        if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
   1.627 +          method_was = added;
   1.628 +        } else {
   1.629 +          method_was = deleted;
   1.630 +        }
   1.631 +      } else if (k_old_method->signature() == k_new_method->signature()) {
   1.632 +        // Both the name and signature match
   1.633 +        method_was = matched;
   1.634 +      } else {
   1.635 +        // The name matches, but the signature doesn't, which means we have to
   1.636 +        // search forward through the new overloaded methods.
   1.637 +        int nj;  // outside the loop for post-loop check
   1.638 +        for (nj = ni + 1; nj < n_new_methods; nj++) {
   1.639 +          methodOop m = (methodOop)k_new_methods->obj_at(nj);
   1.640 +          if (k_old_method->name() != m->name()) {
   1.641 +            // reached another method name so no more overloaded methods
   1.642 +            method_was = deleted;
   1.643 +            break;
   1.644 +          }
   1.645 +          if (k_old_method->signature() == m->signature()) {
   1.646 +            // found a match so swap the methods
   1.647 +            k_new_methods->obj_at_put(ni, m);
   1.648 +            k_new_methods->obj_at_put(nj, k_new_method);
   1.649 +            k_new_method = m;
   1.650 +            method_was = matched;
   1.651 +            break;
   1.652 +          }
   1.653 +        }
   1.654 +
   1.655 +        if (nj >= n_new_methods) {
   1.656 +          // reached the end without a match; so method was deleted
   1.657 +          method_was = deleted;
   1.658 +        }
   1.659 +      }
   1.660 +    }
   1.661 +
   1.662 +    switch (method_was) {
   1.663 +    case matched:
   1.664 +      // methods match, be sure modifiers do too
   1.665 +      old_flags = (jushort) k_old_method->access_flags().get_flags();
   1.666 +      new_flags = (jushort) k_new_method->access_flags().get_flags();
   1.667 +      if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
   1.668 +        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
   1.669 +      }
   1.670 +      {
   1.671 +        u2 new_num = k_new_method->method_idnum();
   1.672 +        u2 old_num = k_old_method->method_idnum();
   1.673 +        if (new_num != old_num) {
   1.674 +          methodOop idnum_owner = scratch_class->method_with_idnum(old_num);
   1.675 +          if (idnum_owner != NULL) {
   1.676 +            // There is already a method assigned this idnum -- switch them
   1.677 +            idnum_owner->set_method_idnum(new_num);
   1.678 +          }
   1.679 +          k_new_method->set_method_idnum(old_num);
   1.680 +          swap_all_method_annotations(old_num, new_num, scratch_class);
   1.681 +        }
   1.682 +      }
   1.683 +      RC_TRACE(0x00008000, ("Method matched: new: %s [%d] == old: %s [%d]",
   1.684 +                            k_new_method->name_and_sig_as_C_string(), ni,
   1.685 +                            k_old_method->name_and_sig_as_C_string(), oi));
   1.686 +      // advance to next pair of methods
   1.687 +      ++oi;
   1.688 +      ++ni;
   1.689 +      break;
   1.690 +    case added:
   1.691 +      // method added, see if it is OK
   1.692 +      new_flags = (jushort) k_new_method->access_flags().get_flags();
   1.693 +      if ((new_flags & JVM_ACC_PRIVATE) == 0
   1.694 +           // hack: private should be treated as final, but alas
   1.695 +          || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
   1.696 +         ) {
   1.697 +        // new methods must be private
   1.698 +        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
   1.699 +      }
   1.700 +      {
   1.701 +        u2 num = the_class->next_method_idnum();
   1.702 +        if (num == constMethodOopDesc::UNSET_IDNUM) {
   1.703 +          // cannot add any more methods
   1.704 +          return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
   1.705 +        }
   1.706 +        u2 new_num = k_new_method->method_idnum();
   1.707 +        methodOop idnum_owner = scratch_class->method_with_idnum(num);
   1.708 +        if (idnum_owner != NULL) {
   1.709 +          // There is already a method assigned this idnum -- switch them
   1.710 +          idnum_owner->set_method_idnum(new_num);
   1.711 +        }
   1.712 +        k_new_method->set_method_idnum(num);
   1.713 +        swap_all_method_annotations(new_num, num, scratch_class);
   1.714 +      }
   1.715 +      RC_TRACE(0x00008000, ("Method added: new: %s [%d]",
   1.716 +                            k_new_method->name_and_sig_as_C_string(), ni));
   1.717 +      ++ni; // advance to next new method
   1.718 +      break;
   1.719 +    case deleted:
   1.720 +      // method deleted, see if it is OK
   1.721 +      old_flags = (jushort) k_old_method->access_flags().get_flags();
   1.722 +      if ((old_flags & JVM_ACC_PRIVATE) == 0
   1.723 +           // hack: private should be treated as final, but alas
   1.724 +          || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
   1.725 +         ) {
   1.726 +        // deleted methods must be private
   1.727 +        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
   1.728 +      }
   1.729 +      RC_TRACE(0x00008000, ("Method deleted: old: %s [%d]",
   1.730 +                            k_old_method->name_and_sig_as_C_string(), oi));
   1.731 +      ++oi; // advance to next old method
   1.732 +      break;
   1.733 +    default:
   1.734 +      ShouldNotReachHere();
   1.735 +    }
   1.736 +  }
   1.737 +
   1.738 +  return JVMTI_ERROR_NONE;
   1.739 +}
   1.740 +
   1.741 +
   1.742 +// Find new constant pool index value for old constant pool index value
   1.743 +// by seaching the index map. Returns zero (0) if there is no mapped
   1.744 +// value for the old constant pool index.
   1.745 +int VM_RedefineClasses::find_new_index(int old_index) {
   1.746 +  if (_index_map_count == 0) {
   1.747 +    // map is empty so nothing can be found
   1.748 +    return 0;
   1.749 +  }
   1.750 +
   1.751 +  if (old_index < 1 || old_index >= _index_map_p->length()) {
   1.752 +    // The old_index is out of range so it is not mapped. This should
   1.753 +    // not happen in regular constant pool merging use, but it can
   1.754 +    // happen if a corrupt annotation is processed.
   1.755 +    return 0;
   1.756 +  }
   1.757 +
   1.758 +  int value = _index_map_p->at(old_index);
   1.759 +  if (value == -1) {
   1.760 +    // the old_index is not mapped
   1.761 +    return 0;
   1.762 +  }
   1.763 +
   1.764 +  return value;
   1.765 +} // end find_new_index()
   1.766 +
   1.767 +
   1.768 +// Returns true if the current mismatch is due to a resolved/unresolved
   1.769 +// class pair. Otherwise, returns false.
   1.770 +bool VM_RedefineClasses::is_unresolved_class_mismatch(constantPoolHandle cp1,
   1.771 +       int index1, constantPoolHandle cp2, int index2) {
   1.772 +
   1.773 +  jbyte t1 = cp1->tag_at(index1).value();
   1.774 +  if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) {
   1.775 +    return false;  // wrong entry type; not our special case
   1.776 +  }
   1.777 +
   1.778 +  jbyte t2 = cp2->tag_at(index2).value();
   1.779 +  if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) {
   1.780 +    return false;  // wrong entry type; not our special case
   1.781 +  }
   1.782 +
   1.783 +  if (t1 == t2) {
   1.784 +    return false;  // not a mismatch; not our special case
   1.785 +  }
   1.786 +
   1.787 +  char *s1 = cp1->klass_name_at(index1)->as_C_string();
   1.788 +  char *s2 = cp2->klass_name_at(index2)->as_C_string();
   1.789 +  if (strcmp(s1, s2) != 0) {
   1.790 +    return false;  // strings don't match; not our special case
   1.791 +  }
   1.792 +
   1.793 +  return true;  // made it through the gauntlet; this is our special case
   1.794 +} // end is_unresolved_class_mismatch()
   1.795 +
   1.796 +
   1.797 +// Returns true if the current mismatch is due to a resolved/unresolved
   1.798 +// string pair. Otherwise, returns false.
   1.799 +bool VM_RedefineClasses::is_unresolved_string_mismatch(constantPoolHandle cp1,
   1.800 +       int index1, constantPoolHandle cp2, int index2) {
   1.801 +
   1.802 +  jbyte t1 = cp1->tag_at(index1).value();
   1.803 +  if (t1 != JVM_CONSTANT_String && t1 != JVM_CONSTANT_UnresolvedString) {
   1.804 +    return false;  // wrong entry type; not our special case
   1.805 +  }
   1.806 +
   1.807 +  jbyte t2 = cp2->tag_at(index2).value();
   1.808 +  if (t2 != JVM_CONSTANT_String && t2 != JVM_CONSTANT_UnresolvedString) {
   1.809 +    return false;  // wrong entry type; not our special case
   1.810 +  }
   1.811 +
   1.812 +  if (t1 == t2) {
   1.813 +    return false;  // not a mismatch; not our special case
   1.814 +  }
   1.815 +
   1.816 +  char *s1 = cp1->string_at_noresolve(index1);
   1.817 +  char *s2 = cp2->string_at_noresolve(index2);
   1.818 +  if (strcmp(s1, s2) != 0) {
   1.819 +    return false;  // strings don't match; not our special case
   1.820 +  }
   1.821 +
   1.822 +  return true;  // made it through the gauntlet; this is our special case
   1.823 +} // end is_unresolved_string_mismatch()
   1.824 +
   1.825 +
// Parse, verify and prepare the replacement ("scratch") version of every
// class in _class_defs. For each class this: parses the new class bytes,
// links the old class if needed, runs the redefinition validity checks,
// verifies the new bytecodes, merges the old and new constant pools, and
// rewrites the scratch class. Successfully prepared scratch classes are
// stored in _scratch_classes[i]. Returns JVMTI_ERROR_NONE on success or
// the JVMTI error code for the first failure encountered.
jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {
  // For consistency allocate memory using os::malloc wrapper.
  // NOTE(review): this buffer is not freed on the early error returns
  // below — presumably it is released in the VM operation's cleanup
  // path; confirm against the caller.
  _scratch_classes = (instanceKlassHandle *)
    os::malloc(sizeof(instanceKlassHandle) * _class_count);
  if (_scratch_classes == NULL) {
    return JVMTI_ERROR_OUT_OF_MEMORY;
  }

  // Scopes all resource allocations below (e.g. as_C_string() calls).
  ResourceMark rm(THREAD);

  // Lazily creates the JvmtiThreadState for the current thread if needed.
  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
  for (int i = 0; i < _class_count; i++) {
    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
    // classes for primitives cannot be redefined
    if (!is_modifiable_class(mirror)) {
      return JVMTI_ERROR_UNMODIFIABLE_CLASS;
    }
    klassOop the_class_oop = java_lang_Class::as_klassOop(mirror);
    instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);
    symbolHandle the_class_sym = symbolHandle(THREAD, the_class->name());

    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
      ("loading name=%s (avail_mem=" UINT64_FORMAT "K)",
      the_class->external_name(), os::available_memory() >> 10));

    // Wrap the caller-supplied class bytes in a stream for the parser.
    ClassFileStream st((u1*) _class_defs[i].class_bytes,
      _class_defs[i].class_byte_count, (char *)"__VM_RedefineClasses__");

    // Parse the stream.
    Handle the_class_loader(THREAD, the_class->class_loader());
    Handle protection_domain(THREAD, the_class->protection_domain());
    // Set redefined class handle in JvmtiThreadState class.
    // This redefined class is sent to agent event handler for class file
    // load hook event.
    state->set_class_being_redefined(&the_class, _class_load_kind);

    klassOop k = SystemDictionary::parse_stream(the_class_sym,
                                                the_class_loader,
                                                protection_domain,
                                                &st,
                                                THREAD);
    // Clear class_being_redefined just to be sure.
    state->clear_class_being_redefined();

    // TODO: if this is retransform, and nothing changed we can skip it

    // k may be NULL if parsing failed; the pending-exception check
    // below handles that case before scratch_class is used.
    instanceKlassHandle scratch_class (THREAD, k);

    if (HAS_PENDING_EXCEPTION) {
      symbolOop ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("parse_stream exception: '%s'",
        ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;

      // Map the parser's exception type to the matching JVMTI error code.
      if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
        return JVMTI_ERROR_UNSUPPORTED_VERSION;
      } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
        return JVMTI_ERROR_INVALID_CLASS_FORMAT;
      } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
        return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
      } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
        // The message will be "XXX (wrong name: YYY)"
        return JVMTI_ERROR_NAMES_DONT_MATCH;
      } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {  // Just in case more exceptions can be thrown..
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Ensure class is linked before redefine
    if (!the_class->is_linked()) {
      the_class->link_class(THREAD);
      if (HAS_PENDING_EXCEPTION) {
        symbolOop ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
        RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("link_class exception: '%s'",
          ex_name->as_C_string()));
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Do the validity checks in compare_and_normalize_class_versions()
    // before verifying the byte codes. By doing these checks first, we
    // limit the number of functions that require redirection from
    // the_class to scratch_class. In particular, we don't have to
    // modify JNI GetSuperclass() and thus won't change its performance.
    jvmtiError res = compare_and_normalize_class_versions(the_class,
                       scratch_class);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    // verify what the caller passed us
    {
      // The bug 6214132 caused the verification to fail.
      // Information about the_class and scratch_class is temporarily
      // recorded into jvmtiThreadState. This data is used to redirect
      // the_class to scratch_class in the JVM_* functions called by the
      // verifier. Please, refer to jvmtiThreadState.hpp for the detailed
      // description.
      RedefineVerifyMark rvm(&the_class, &scratch_class, state);
      Verifier::verify(
        scratch_class, Verifier::ThrowException, THREAD);
    }

    if (HAS_PENDING_EXCEPTION) {
      symbolOop ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
        ("verify_byte_codes exception: '%s'", ex_name->as_C_string()));
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        // tell the caller the bytecodes are bad
        return JVMTI_ERROR_FAILS_VERIFICATION;
      }
    }

    // Merge the old and new constant pools and rewrite the scratch
    // class bytecodes to use the merged pool.
    res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
    if (res != JVMTI_ERROR_NONE) {
      return res;
    }

    if (VerifyMergedCPBytecodes) {
      // verify what we have done during constant pool merging
      {
        RedefineVerifyMark rvm(&the_class, &scratch_class, state);
        Verifier::verify(scratch_class, Verifier::ThrowException, THREAD);
      }

      if (HAS_PENDING_EXCEPTION) {
        symbolOop ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
        RC_TRACE_WITH_THREAD(0x00000002, THREAD,
          ("verify_byte_codes post merge-CP exception: '%s'",
          ex_name->as_C_string()));
        CLEAR_PENDING_EXCEPTION;
        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
          return JVMTI_ERROR_OUT_OF_MEMORY;
        } else {
          // tell the caller that constant pool merging screwed up
          return JVMTI_ERROR_INTERNAL;
        }
      }
    }

    // Rewrite the scratch class; any failure other than OOM is
    // reported to the caller as an internal error.
    Rewriter::rewrite(scratch_class, THREAD);
    if (HAS_PENDING_EXCEPTION) {
      symbolOop ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
      CLEAR_PENDING_EXCEPTION;
      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
        return JVMTI_ERROR_OUT_OF_MEMORY;
      } else {
        return JVMTI_ERROR_INTERNAL;
      }
    }

    // Keep the fully prepared scratch class for the redefine step.
    _scratch_classes[i] = scratch_class;

    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
      ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)",
      the_class->external_name(), os::available_memory() >> 10));
  }

  return JVMTI_ERROR_NONE;
}
  1.1002 +
  1.1003 +
  1.1004 +// Map old_index to new_index as needed. scratch_cp is only needed
  1.1005 +// for RC_TRACE() calls.
  1.1006 +void VM_RedefineClasses::map_index(constantPoolHandle scratch_cp,
  1.1007 +       int old_index, int new_index) {
  1.1008 +  if (find_new_index(old_index) != 0) {
  1.1009 +    // old_index is already mapped
  1.1010 +    return;
  1.1011 +  }
  1.1012 +
  1.1013 +  if (old_index == new_index) {
  1.1014 +    // no mapping is needed
  1.1015 +    return;
  1.1016 +  }
  1.1017 +
  1.1018 +  _index_map_p->at_put(old_index, new_index);
  1.1019 +  _index_map_count++;
  1.1020 +
  1.1021 +  RC_TRACE(0x00040000, ("mapped tag %d at index %d to %d",
  1.1022 +    scratch_cp->tag_at(old_index).value(), old_index, new_index));
  1.1023 +} // end map_index()
  1.1024 +
  1.1025 +
// Merge old_cp and scratch_cp and return the results of the merge via
// merge_cp_p. The number of entries in *merge_cp_p is returned via
// merge_cp_length_p. The entries in old_cp occupy the same locations
// in *merge_cp_p. Also creates a map of indices from entries in
// scratch_cp to the corresponding entry in *merge_cp_p. Index map
// entries are only created for entries in scratch_cp that occupy a
// different location in *merged_cp_p.
// Returns false on a failed robustness check; the CHECK_0 macros used
// below also return false (0) to the caller if a callee raises an
// exception.
bool VM_RedefineClasses::merge_constant_pools(constantPoolHandle old_cp,
       constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p,
       int *merge_cp_length_p, TRAPS) {

  if (merge_cp_p == NULL) {
    assert(false, "caller must provide scatch constantPool");
    return false; // robustness
  }
  if (merge_cp_length_p == NULL) {
    assert(false, "caller must provide scatch CP length");
    return false; // robustness
  }
  // Worst case we need old_cp->length() + scratch_cp()->length(),
  // but the caller might be smart so make sure we have at least
  // the minimum.
  if ((*merge_cp_p)->length() < old_cp->length()) {
    assert(false, "merge area too small");
    return false; // robustness
  }

  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
    ("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(),
    scratch_cp->length()));

  {
    // Pass 0:
    // The old_cp is copied to *merge_cp_p; this means that any code
    // using old_cp does not have to change. This work looks like a
    // perfect fit for constantPoolOop::copy_cp_to(), but we need to
    // handle one special case:
    // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
    // This will make verification happy.

    int old_i;  // index into old_cp

    // index zero (0) is not used in constantPools
    for (old_i = 1; old_i < old_cp->length(); old_i++) {
      // leave debugging crumb
      jbyte old_tag = old_cp->tag_at(old_i).value();
      switch (old_tag) {
      case JVM_CONSTANT_Class:
        // revert the copy to JVM_CONSTANT_UnresolvedClass
        (*merge_cp_p)->unresolved_klass_at_put(old_i,
          old_cp->klass_name_at(old_i));
        break;

      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // just copy the entry to *merge_cp_p, but double and long take
        // two constant pool entries; skip the invisible second slot
        old_cp->copy_entry_to(old_i, *merge_cp_p, old_i, CHECK_0);
        old_i++;
        break;

      default:
        // just copy the entry to *merge_cp_p
        old_cp->copy_entry_to(old_i, *merge_cp_p, old_i, CHECK_0);
        break;
      }
    } // end for each old_cp entry

    // We don't need to sanity check that *merge_cp_length_p is within
    // *merge_cp_p bounds since we have the minimum on-entry check above.
    (*merge_cp_length_p) = old_i;
  }

  // merge_cp_len should be the same as old_cp->length() at this point
  // so this trace message is really a "warm-and-breathing" message.
  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
    ("after pass 0: merge_cp_len=%d", *merge_cp_length_p));

  int scratch_i;  // index into scratch_cp
  {
    // Pass 1a:
    // Compare scratch_cp entries to the old_cp entries that we have
    // already copied to *merge_cp_p. In this pass, we are eliminating
    // exact duplicates (matching entry at same index) so we only
    // compare entries in the common indice range.
    int increment = 1;  // step size; 2 when the entry is a long/double
    int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
    for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p,
        scratch_i, CHECK_0);
      if (match) {
        // found a match at the same index so nothing more to do
        continue;
      } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
                                              *merge_cp_p, scratch_i)) {
        // The mismatch in compare_entry_to() above is because of a
        // resolved versus unresolved class entry at the same index
        // with the same string value. Since Pass 0 reverted any
        // class entries to unresolved class entries in *merge_cp_p,
        // we go with the unresolved class entry.
        continue;
      } else if (is_unresolved_string_mismatch(scratch_cp, scratch_i,
                                               *merge_cp_p, scratch_i)) {
        // The mismatch in compare_entry_to() above is because of a
        // resolved versus unresolved string entry at the same index
        // with the same string value. We can live with whichever
        // happens to be at scratch_i in *merge_cp_p.
        continue;
      }

      int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p,
        CHECK_0);
      if (found_i != 0) {
        guarantee(found_i != scratch_i,
          "compare_entry_to() and find_matching_entry() do not agree");

        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // The find_matching_entry() call above could fail to find a match
      // due to a resolved versus unresolved class or string entry situation
      // like we solved above with the is_unresolved_*_mismatch() calls.
      // However, we would have to call is_unresolved_*_mismatch() over
      // all of *merge_cp_p (potentially) and that doesn't seem to be
      // worth the time.

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }
  }

  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
    ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
    *merge_cp_length_p, scratch_i, _index_map_count));

  if (scratch_i < scratch_cp->length()) {
    // Pass 1b:
    // old_cp is smaller than scratch_cp so there are entries in
    // scratch_cp that we have not yet processed. We take care of
    // those now.
    int increment = 1;  // step size; 2 when the entry is a long/double
    for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
      switch (scratch_cp->tag_at(scratch_i).value()) {
      case JVM_CONSTANT_Double:
      case JVM_CONSTANT_Long:
        // double and long take two constant pool entries
        increment = 2;
        break;

      default:
        increment = 1;
        break;
      }

      int found_i =
        scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_0);
      if (found_i != 0) {
        // Found a matching entry somewhere else in *merge_cp_p so
        // just need a mapping entry.
        map_index(scratch_cp, scratch_i, found_i);
        continue;
      }

      // No match found so we have to append this entry and any unique
      // referenced entries to *merge_cp_p.
      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
        CHECK_0);
    }

    RC_TRACE_WITH_THREAD(0x00020000, THREAD,
      ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
      *merge_cp_length_p, scratch_i, _index_map_count));
  }

  return true;
} // end merge_constant_pools()
  1.1219 +
  1.1220 +
  1.1221 +// Merge constant pools between the_class and scratch_class and
  1.1222 +// potentially rewrite bytecodes in scratch_class to use the merged
  1.1223 +// constant pool.
  1.1224 +jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
  1.1225 +             instanceKlassHandle the_class, instanceKlassHandle scratch_class,
  1.1226 +             TRAPS) {
  1.1227 +  // worst case merged constant pool length is old and new combined
  1.1228 +  int merge_cp_length = the_class->constants()->length()
  1.1229 +        + scratch_class->constants()->length();
  1.1230 +
  1.1231 +  constantPoolHandle old_cp(THREAD, the_class->constants());
  1.1232 +  constantPoolHandle scratch_cp(THREAD, scratch_class->constants());
  1.1233 +
  1.1234 +  // Constant pools are not easily reused so we allocate a new one
  1.1235 +  // each time.
  1.1236 +  constantPoolHandle merge_cp(THREAD,
  1.1237 +    oopFactory::new_constantPool(merge_cp_length, THREAD));
  1.1238 +  int orig_length = old_cp->orig_length();
  1.1239 +  if (orig_length == 0) {
  1.1240 +    // This old_cp is an actual original constant pool. We save
  1.1241 +    // the original length in the merged constant pool so that
  1.1242 +    // merge_constant_pools() can be more efficient. If a constant
  1.1243 +    // pool has a non-zero orig_length() value, then that constant
  1.1244 +    // pool was created by a merge operation in RedefineClasses.
  1.1245 +    merge_cp->set_orig_length(old_cp->length());
  1.1246 +  } else {
  1.1247 +    // This old_cp is a merged constant pool from a previous
  1.1248 +    // RedefineClasses() calls so just copy the orig_length()
  1.1249 +    // value.
  1.1250 +    merge_cp->set_orig_length(old_cp->orig_length());
  1.1251 +  }
  1.1252 +
  1.1253 +  ResourceMark rm(THREAD);
  1.1254 +  _index_map_count = 0;
  1.1255 +  _index_map_p = new intArray(scratch_cp->length(), -1);
  1.1256 +
  1.1257 +  bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
  1.1258 +                  &merge_cp_length, THREAD);
  1.1259 +  if (!result) {
  1.1260 +    // The merge can fail due to memory allocation failure or due
  1.1261 +    // to robustness checks.
  1.1262 +    return JVMTI_ERROR_INTERNAL;
  1.1263 +  }
  1.1264 +
  1.1265 +  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
  1.1266 +    ("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count));
  1.1267 +
  1.1268 +  if (_index_map_count == 0) {
  1.1269 +    // there is nothing to map between the new and merged constant pools
  1.1270 +
  1.1271 +    if (old_cp->length() == scratch_cp->length()) {
  1.1272 +      // The old and new constant pools are the same length and the
  1.1273 +      // index map is empty. This means that the three constant pools
  1.1274 +      // are equivalent (but not the same). Unfortunately, the new
  1.1275 +      // constant pool has not gone through link resolution nor have
  1.1276 +      // the new class bytecodes gone through constant pool cache
  1.1277 +      // rewriting so we can't use the old constant pool with the new
  1.1278 +      // class.
  1.1279 +
  1.1280 +      merge_cp = constantPoolHandle();  // toss the merged constant pool
  1.1281 +    } else if (old_cp->length() < scratch_cp->length()) {
  1.1282 +      // The old constant pool has fewer entries than the new constant
  1.1283 +      // pool and the index map is empty. This means the new constant
  1.1284 +      // pool is a superset of the old constant pool. However, the old
  1.1285 +      // class bytecodes have already gone through constant pool cache
  1.1286 +      // rewriting so we can't use the new constant pool with the old
  1.1287 +      // class.
  1.1288 +
  1.1289 +      merge_cp = constantPoolHandle();  // toss the merged constant pool
  1.1290 +    } else {
  1.1291 +      // The old constant pool has more entries than the new constant
  1.1292 +      // pool and the index map is empty. This means that both the old
  1.1293 +      // and merged constant pools are supersets of the new constant
  1.1294 +      // pool.
  1.1295 +
  1.1296 +      // Replace the new constant pool with a shrunken copy of the
  1.1297 +      // merged constant pool; the previous new constant pool will
  1.1298 +      // get GCed.
  1.1299 +      set_new_constant_pool(scratch_class, merge_cp, merge_cp_length, true,
  1.1300 +        THREAD);
  1.1301 +      // drop local ref to the merged constant pool
  1.1302 +      merge_cp = constantPoolHandle();
  1.1303 +    }
  1.1304 +  } else {
  1.1305 +    if (RC_TRACE_ENABLED(0x00040000)) {
  1.1306 +      // don't want to loop unless we are tracing
  1.1307 +      int count = 0;
  1.1308 +      for (int i = 1; i < _index_map_p->length(); i++) {
  1.1309 +        int value = _index_map_p->at(i);
  1.1310 +
  1.1311 +        if (value != -1) {
  1.1312 +          RC_TRACE_WITH_THREAD(0x00040000, THREAD,
  1.1313 +            ("index_map[%d]: old=%d new=%d", count, i, value));
  1.1314 +          count++;
  1.1315 +        }
  1.1316 +      }
  1.1317 +    }
  1.1318 +
  1.1319 +    // We have entries mapped between the new and merged constant pools
  1.1320 +    // so we have to rewrite some constant pool references.
  1.1321 +    if (!rewrite_cp_refs(scratch_class, THREAD)) {
  1.1322 +      return JVMTI_ERROR_INTERNAL;
  1.1323 +    }
  1.1324 +
  1.1325 +    // Replace the new constant pool with a shrunken copy of the
  1.1326 +    // merged constant pool so now the rewritten bytecodes have
  1.1327 +    // valid references; the previous new constant pool will get
  1.1328 +    // GCed.
  1.1329 +    set_new_constant_pool(scratch_class, merge_cp, merge_cp_length, true,
  1.1330 +      THREAD);
  1.1331 +  }
  1.1332 +
  1.1333 +  return JVMTI_ERROR_NONE;
  1.1334 +} // end merge_cp_and_rewrite()
  1.1335 +
  1.1336 +
  1.1337 +// Rewrite constant pool references in klass scratch_class.
  1.1338 +bool VM_RedefineClasses::rewrite_cp_refs(instanceKlassHandle scratch_class,
  1.1339 +       TRAPS) {
  1.1340 +
  1.1341 +  // rewrite constant pool references in the methods:
  1.1342 +  if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) {
  1.1343 +    // propogate failure back to caller
  1.1344 +    return false;
  1.1345 +  }
  1.1346 +
  1.1347 +  // rewrite constant pool references in the class_annotations:
  1.1348 +  if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) {
  1.1349 +    // propogate failure back to caller
  1.1350 +    return false;
  1.1351 +  }
  1.1352 +
  1.1353 +  // rewrite constant pool references in the fields_annotations:
  1.1354 +  if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) {
  1.1355 +    // propogate failure back to caller
  1.1356 +    return false;
  1.1357 +  }
  1.1358 +
  1.1359 +  // rewrite constant pool references in the methods_annotations:
  1.1360 +  if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) {
  1.1361 +    // propogate failure back to caller
  1.1362 +    return false;
  1.1363 +  }
  1.1364 +
  1.1365 +  // rewrite constant pool references in the methods_parameter_annotations:
  1.1366 +  if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class,
  1.1367 +         THREAD)) {
  1.1368 +    // propogate failure back to caller
  1.1369 +    return false;
  1.1370 +  }
  1.1371 +
  1.1372 +  // rewrite constant pool references in the methods_default_annotations:
  1.1373 +  if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class,
  1.1374 +         THREAD)) {
  1.1375 +    // propogate failure back to caller
  1.1376 +    return false;
  1.1377 +  }
  1.1378 +
  1.1379 +  return true;
  1.1380 +} // end rewrite_cp_refs()
  1.1381 +
  1.1382 +
  1.1383 +// Rewrite constant pool references in the methods.
  1.1384 +bool VM_RedefineClasses::rewrite_cp_refs_in_methods(
  1.1385 +       instanceKlassHandle scratch_class, TRAPS) {
  1.1386 +
  1.1387 +  objArrayHandle methods(THREAD, scratch_class->methods());
  1.1388 +
  1.1389 +  if (methods.is_null() || methods->length() == 0) {
  1.1390 +    // no methods so nothing to do
  1.1391 +    return true;
  1.1392 +  }
  1.1393 +
  1.1394 +  // rewrite constant pool references in the methods:
  1.1395 +  for (int i = methods->length() - 1; i >= 0; i--) {
  1.1396 +    methodHandle method(THREAD, (methodOop)methods->obj_at(i));
  1.1397 +    methodHandle new_method;
  1.1398 +    rewrite_cp_refs_in_method(method, &new_method, CHECK_false);
  1.1399 +    if (!new_method.is_null()) {
  1.1400 +      // the method has been replaced so save the new method version
  1.1401 +      methods->obj_at_put(i, new_method());
  1.1402 +    }
  1.1403 +  }
  1.1404 +
  1.1405 +  return true;
  1.1406 +}
  1.1407 +
  1.1408 +
// Rewrite constant pool references in the specific method. This code
// was adapted from Rewriter::rewrite_method().
//
// Walks the method's bytecodes and patches any constant pool index
// that was remapped by the constant pool merge (find_new_index() != 0).
// If an ldc's remapped index no longer fits in one byte, the ldc must
// grow into an ldc_w; the Relocator then produces a NEW methodOop,
// which is returned to the caller through new_method_p so the caller
// can update the containing class. *new_method_p is left as a null
// handle when the method was patched in place (or not changed at all).
void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
       methodHandle *new_method_p, TRAPS) {

  *new_method_p = methodHandle();  // default is no new method

  // We cache a pointer to the bytecodes here in code_base. If GC
  // moves the methodOop, then the bytecodes will also move which
  // will likely cause a crash. We create a No_Safepoint_Verifier
  // object to detect whether we pass a possible safepoint in this
  // code block.
  No_Safepoint_Verifier nsv;

  // Bytecodes and their length
  address code_base = method->code_base();
  int code_length = method->code_size();

  int bc_length;
  for (int bci = 0; bci < code_length; bci += bc_length) {
    address bcp = code_base + bci;
    Bytecodes::Code c = (Bytecodes::Code)(*bcp);

    bc_length = Bytecodes::length_for(c);
    if (bc_length == 0) {
      // More complicated bytecodes report a length of zero so
      // we have to try again a slightly different way.
      bc_length = Bytecodes::length_at(bcp);
    }

    assert(bc_length != 0, "impossible bytecode length");

    switch (c) {
      case Bytecodes::_ldc:
      {
        // ldc carries a one-byte constant pool index
        int cp_index = *(bcp + 1);
        int new_index = find_new_index(cp_index);

        if (StressLdcRewrite && new_index == 0) {
          // If we are stressing ldc -> ldc_w rewriting, then we
          // always need a new_index value.
          new_index = cp_index;
        }
        if (new_index != 0) {
          // the original index is mapped so we have more work to do
          if (!StressLdcRewrite && new_index <= max_jubyte) {
            // The new value can still use ldc instead of ldc_w
            // unless we are trying to stress ldc -> ldc_w rewriting
            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
              ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
              bcp, cp_index, new_index));
            *(bcp + 1) = new_index;
          } else {
            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
              ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d",
              Bytecodes::name(c), bcp, cp_index, new_index));
            // the new value needs ldc_w instead of ldc
            u_char inst_buffer[4]; // max instruction size is 4 bytes
            bcp = (address)inst_buffer;
            // construct new instruction sequence
            *bcp = Bytecodes::_ldc_w;
            bcp++;
            // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
            // See comment below for difference between put_Java_u2()
            // and put_native_u2().
            Bytes::put_Java_u2(bcp, new_index);

            Relocator rc(method, NULL /* no RelocatorListener needed */);
            methodHandle m;
            {
              // The relocator may allocate (and thus may safepoint), so
              // the No_Safepoint_Verifier must be paused around it.
              Pause_No_Safepoint_Verifier pnsv(&nsv);

              // ldc is 2 bytes and ldc_w is 3 bytes
              m = rc.insert_space_at(bci, 3, inst_buffer, THREAD);
              if (m.is_null() || HAS_PENDING_EXCEPTION) {
                guarantee(false, "insert_space_at() failed");
              }
            }

            // return the new method so that the caller can update
            // the containing class
            *new_method_p = method = m;
            // switch our bytecode processing loop from the old method
            // to the new method
            code_base = method->code_base();
            code_length = method->code_size();
            bcp = code_base + bci;
            c = (Bytecodes::Code)(*bcp);
            bc_length = Bytecodes::length_for(c);
            assert(bc_length != 0, "sanity check");
          } // end we need ldc_w instead of ldc
        } // end if there is a mapped index
      } break;

      // these bytecodes have a two-byte constant pool index
      case Bytecodes::_anewarray      : // fall through
      case Bytecodes::_checkcast      : // fall through
      case Bytecodes::_getfield       : // fall through
      case Bytecodes::_getstatic      : // fall through
      case Bytecodes::_instanceof     : // fall through
      case Bytecodes::_invokeinterface: // fall through
      case Bytecodes::_invokespecial  : // fall through
      case Bytecodes::_invokestatic   : // fall through
      case Bytecodes::_invokevirtual  : // fall through
      case Bytecodes::_ldc_w          : // fall through
      case Bytecodes::_ldc2_w         : // fall through
      case Bytecodes::_multianewarray : // fall through
      case Bytecodes::_new            : // fall through
      case Bytecodes::_putfield       : // fall through
      case Bytecodes::_putstatic      :
      {
        address p = bcp + 1;
        int cp_index = Bytes::get_Java_u2(p);
        int new_index = find_new_index(cp_index);
        if (new_index != 0) {
          // the original index is mapped so update w/ new value
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
            bcp, cp_index, new_index));
          // Rewriter::rewrite_method() uses put_native_u2() in this
          // situation because it is reusing the constant pool index
          // location for a native index into the constantPoolCache.
          // Since we are updating the constant pool index prior to
          // verification and constantPoolCache initialization, we
          // need to keep the new index in Java byte order.
          Bytes::put_Java_u2(p, new_index);
        }
      } break;
    }
  } // end for each bytecode
} // end rewrite_cp_refs_in_method()
  1.1540 +
  1.1541 +
  1.1542 +// Rewrite constant pool references in the class_annotations field.
  1.1543 +bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(
  1.1544 +       instanceKlassHandle scratch_class, TRAPS) {
  1.1545 +
  1.1546 +  typeArrayHandle class_annotations(THREAD,
  1.1547 +    scratch_class->class_annotations());
  1.1548 +  if (class_annotations.is_null() || class_annotations->length() == 0) {
  1.1549 +    // no class_annotations so nothing to do
  1.1550 +    return true;
  1.1551 +  }
  1.1552 +
  1.1553 +  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1554 +    ("class_annotations length=%d", class_annotations->length()));
  1.1555 +
  1.1556 +  int byte_i = 0;  // byte index into class_annotations
  1.1557 +  return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i,
  1.1558 +           THREAD);
  1.1559 +}
  1.1560 +
  1.1561 +
  1.1562 +// Rewrite constant pool references in an annotations typeArray. This
  1.1563 +// "structure" is adapted from the RuntimeVisibleAnnotations_attribute
  1.1564 +// that is described in section 4.8.15 of the 2nd-edition of the VM spec:
  1.1565 +//
  1.1566 +// annotations_typeArray {
  1.1567 +//   u2 num_annotations;
  1.1568 +//   annotation annotations[num_annotations];
  1.1569 +// }
  1.1570 +//
  1.1571 +bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
  1.1572 +       typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {
  1.1573 +
  1.1574 +  if ((byte_i_ref + 2) > annotations_typeArray->length()) {
  1.1575 +    // not enough room for num_annotations field
  1.1576 +    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1577 +      ("length() is too small for num_annotations field"));
  1.1578 +    return false;
  1.1579 +  }
  1.1580 +
  1.1581 +  u2 num_annotations = Bytes::get_Java_u2((address)
  1.1582 +                         annotations_typeArray->byte_at_addr(byte_i_ref));
  1.1583 +  byte_i_ref += 2;
  1.1584 +
  1.1585 +  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1586 +    ("num_annotations=%d", num_annotations));
  1.1587 +
  1.1588 +  int calc_num_annotations = 0;
  1.1589 +  for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
  1.1590 +    if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
  1.1591 +           byte_i_ref, THREAD)) {
  1.1592 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1593 +        ("bad annotation_struct at %d", calc_num_annotations));
  1.1594 +      // propogate failure back to caller
  1.1595 +      return false;
  1.1596 +    }
  1.1597 +  }
  1.1598 +  assert(num_annotations == calc_num_annotations, "sanity check");
  1.1599 +
  1.1600 +  return true;
  1.1601 +} // end rewrite_cp_refs_in_annotations_typeArray()
  1.1602 +
  1.1603 +
// Rewrite constant pool references in the annotation struct portion of
// an annotations_typeArray. This "structure" is from section 4.8.15 of
// the 2nd-edition of the VM spec:
//
// struct annotation {
//   u2 type_index;
//   u2 num_element_value_pairs;
//   {
//     u2 element_name_index;
//     element_value value;
//   } element_value_pairs[num_element_value_pairs];
// }
//
// byte_i_ref is advanced past the annotation struct that is consumed;
// returns false if the data is truncated or malformed.
bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
       typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {
  if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
    // not enough room for smallest annotation_struct (type_index +
    // num_element_value_pairs with zero pairs)
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for annotation_struct"));
    return false;
  }

  // rewrite_cp_ref_in_annotation_data() patches the index in place if
  // it was remapped and advances byte_i_ref by 2
  u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
                    byte_i_ref, "mapped old type_index=%d", THREAD);

  u2 num_element_value_pairs = Bytes::get_Java_u2((address)
                                 annotations_typeArray->byte_at_addr(
                                 byte_i_ref));
  byte_i_ref += 2;

  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
    ("type_index=%d  num_element_value_pairs=%d", type_index,
    num_element_value_pairs));

  int calc_num_element_value_pairs = 0;
  for (; calc_num_element_value_pairs < num_element_value_pairs;
       calc_num_element_value_pairs++) {
    if ((byte_i_ref + 2) > annotations_typeArray->length()) {
      // not enough room for another element_name_index, let alone
      // the rest of another component
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("length() is too small for element_name_index"));
      return false;
    }

    u2 element_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old element_name_index=%d", THREAD);

    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("element_name_index=%d", element_name_index));

    // recursively consume the (possibly nested) element_value
    if (!rewrite_cp_refs_in_element_value(annotations_typeArray,
           byte_i_ref, THREAD)) {
      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("bad element_value at %d", calc_num_element_value_pairs));
      // propagate failure back to caller
      return false;
    }
  } // end for each component
  assert(num_element_value_pairs == calc_num_element_value_pairs,
    "sanity check");

  return true;
} // end rewrite_cp_refs_in_annotation_struct()
  1.1669 +
  1.1670 +
  1.1671 +// Rewrite a constant pool reference at the current position in
  1.1672 +// annotations_typeArray if needed. Returns the original constant
  1.1673 +// pool reference if a rewrite was not needed or the new constant
  1.1674 +// pool reference if a rewrite was needed.
  1.1675 +u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
  1.1676 +     typeArrayHandle annotations_typeArray, int &byte_i_ref,
  1.1677 +     const char * trace_mesg, TRAPS) {
  1.1678 +
  1.1679 +  address cp_index_addr = (address)
  1.1680 +    annotations_typeArray->byte_at_addr(byte_i_ref);
  1.1681 +  u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
  1.1682 +  u2 new_cp_index = find_new_index(old_cp_index);
  1.1683 +  if (new_cp_index != 0) {
  1.1684 +    RC_TRACE_WITH_THREAD(0x02000000, THREAD, (trace_mesg, old_cp_index));
  1.1685 +    Bytes::put_Java_u2(cp_index_addr, new_cp_index);
  1.1686 +    old_cp_index = new_cp_index;
  1.1687 +  }
  1.1688 +  byte_i_ref += 2;
  1.1689 +  return old_cp_index;
  1.1690 +}
  1.1691 +
  1.1692 +
// Rewrite constant pool references in the element_value portion of an
// annotations_typeArray. This "structure" is from section 4.8.15.1 of
// the 2nd-edition of the VM spec:
//
// struct element_value {
//   u1 tag;
//   union {
//     u2 const_value_index;
//     {
//       u2 type_name_index;
//       u2 const_name_index;
//     } enum_const_value;
//     u2 class_info_index;
//     annotation annotation_value;
//     struct {
//       u2 num_values;
//       element_value values[num_values];
//     } array_value;
//   } value;
// }
//
// The tag selects which union member follows; nested annotations ('@')
// and arrays ('[') are handled recursively. byte_i_ref is advanced
// past the element_value that is consumed; returns false if the data
// is truncated or the tag is unrecognized.
bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
       typeArrayHandle annotations_typeArray, int &byte_i_ref, TRAPS) {

  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
    // not enough room for a tag let alone the rest of an element_value
    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
      ("length() is too small for a tag"));
    return false;
  }

  u1 tag = annotations_typeArray->byte_at(byte_i_ref);
  byte_i_ref++;
  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("tag='%c'", tag));

  switch (tag) {
    // These BaseType tag values are from Table 4.2 in VM spec:
    case 'B':  // byte
    case 'C':  // char
    case 'D':  // double
    case 'F':  // float
    case 'I':  // int
    case 'J':  // long
    case 'S':  // short
    case 'Z':  // boolean

    // The remaining tag values are from Table 4.8 in the 2nd-edition of
    // the VM spec:
    case 's':
    {
      // For the above tag values (including the BaseType values),
      // value.const_value_index is right union field.

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a const_value_index
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a const_value_index"));
        return false;
      }

      // patches the index in place if remapped; advances byte_i_ref
      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
                               annotations_typeArray, byte_i_ref,
                               "mapped old const_value_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("const_value_index=%d", const_value_index));
    } break;

    case 'e':
    {
      // for the above tag value, value.enum_const_value is right union field

      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
        // not enough room for a enum_const_value
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a enum_const_value"));
        return false;
      }

      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
                             annotations_typeArray, byte_i_ref,
                             "mapped old type_name_index=%d", THREAD);

      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old const_name_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("type_name_index=%d  const_name_index=%d", type_name_index,
        const_name_index));
    } break;

    case 'c':
    {
      // for the above tag value, value.class_info_index is right union field

      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a class_info_index
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a class_info_index"));
        return false;
      }

      u2 class_info_index = rewrite_cp_ref_in_annotation_data(
                              annotations_typeArray, byte_i_ref,
                              "mapped old class_info_index=%d", THREAD);

      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
        ("class_info_index=%d", class_info_index));
    } break;

    case '@':
      // For the above tag value, value.attr_value is the right union
      // field. This is a nested annotation.
      if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
             byte_i_ref, THREAD)) {
        // propagate failure back to caller
        return false;
      }
      break;

    case '[':
    {
      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
        // not enough room for a num_values field
        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
          ("length() is too small for a num_values field"));
        return false;
      }

      // For the above tag value, value.array_value is the right union
      // field. This is an array of nested element_value.
      u2 num_values = Bytes::get_Java_u2((address)
                        annotations_typeArray->byte_at_addr(byte_i_ref));
      byte_i_ref += 2;
      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("num_values=%d", num_values));

      int calc_num_values = 0;
      for (; calc_num_values < num_values; calc_num_values++) {
        if (!rewrite_cp_refs_in_element_value(
               annotations_typeArray, byte_i_ref, THREAD)) {
          RC_TRACE_WITH_THREAD(0x02000000, THREAD,
            ("bad nested element_value at %d", calc_num_values));
          // propagate failure back to caller
          return false;
        }
      }
      assert(num_values == calc_num_values, "sanity check");
    } break;

    default:
      // unknown tag value; the annotation data is malformed
      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("bad tag=0x%x", tag));
      return false;
  } // end decode tag field

  return true;
} // end rewrite_cp_refs_in_element_value()
  1.1850 +
  1.1851 +
  1.1852 +// Rewrite constant pool references in a fields_annotations field.
  1.1853 +bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
  1.1854 +       instanceKlassHandle scratch_class, TRAPS) {
  1.1855 +
  1.1856 +  objArrayHandle fields_annotations(THREAD,
  1.1857 +    scratch_class->fields_annotations());
  1.1858 +
  1.1859 +  if (fields_annotations.is_null() || fields_annotations->length() == 0) {
  1.1860 +    // no fields_annotations so nothing to do
  1.1861 +    return true;
  1.1862 +  }
  1.1863 +
  1.1864 +  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1865 +    ("fields_annotations length=%d", fields_annotations->length()));
  1.1866 +
  1.1867 +  for (int i = 0; i < fields_annotations->length(); i++) {
  1.1868 +    typeArrayHandle field_annotations(THREAD,
  1.1869 +      (typeArrayOop)fields_annotations->obj_at(i));
  1.1870 +    if (field_annotations.is_null() || field_annotations->length() == 0) {
  1.1871 +      // this field does not have any annotations so skip it
  1.1872 +      continue;
  1.1873 +    }
  1.1874 +
  1.1875 +    int byte_i = 0;  // byte index into field_annotations
  1.1876 +    if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i,
  1.1877 +           THREAD)) {
  1.1878 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1879 +        ("bad field_annotations at %d", i));
  1.1880 +      // propogate failure back to caller
  1.1881 +      return false;
  1.1882 +    }
  1.1883 +  }
  1.1884 +
  1.1885 +  return true;
  1.1886 +} // end rewrite_cp_refs_in_fields_annotations()
  1.1887 +
  1.1888 +
  1.1889 +// Rewrite constant pool references in a methods_annotations field.
  1.1890 +bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
  1.1891 +       instanceKlassHandle scratch_class, TRAPS) {
  1.1892 +
  1.1893 +  objArrayHandle methods_annotations(THREAD,
  1.1894 +    scratch_class->methods_annotations());
  1.1895 +
  1.1896 +  if (methods_annotations.is_null() || methods_annotations->length() == 0) {
  1.1897 +    // no methods_annotations so nothing to do
  1.1898 +    return true;
  1.1899 +  }
  1.1900 +
  1.1901 +  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1902 +    ("methods_annotations length=%d", methods_annotations->length()));
  1.1903 +
  1.1904 +  for (int i = 0; i < methods_annotations->length(); i++) {
  1.1905 +    typeArrayHandle method_annotations(THREAD,
  1.1906 +      (typeArrayOop)methods_annotations->obj_at(i));
  1.1907 +    if (method_annotations.is_null() || method_annotations->length() == 0) {
  1.1908 +      // this method does not have any annotations so skip it
  1.1909 +      continue;
  1.1910 +    }
  1.1911 +
  1.1912 +    int byte_i = 0;  // byte index into method_annotations
  1.1913 +    if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i,
  1.1914 +           THREAD)) {
  1.1915 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1916 +        ("bad method_annotations at %d", i));
  1.1917 +      // propogate failure back to caller
  1.1918 +      return false;
  1.1919 +    }
  1.1920 +  }
  1.1921 +
  1.1922 +  return true;
  1.1923 +} // end rewrite_cp_refs_in_methods_annotations()
  1.1924 +
  1.1925 +
  1.1926 +// Rewrite constant pool references in a methods_parameter_annotations
  1.1927 +// field. This "structure" is adapted from the
  1.1928 +// RuntimeVisibleParameterAnnotations_attribute described in section
  1.1929 +// 4.8.17 of the 2nd-edition of the VM spec:
  1.1930 +//
  1.1931 +// methods_parameter_annotations_typeArray {
  1.1932 +//   u1 num_parameters;
  1.1933 +//   {
  1.1934 +//     u2 num_annotations;
  1.1935 +//     annotation annotations[num_annotations];
  1.1936 +//   } parameter_annotations[num_parameters];
  1.1937 +// }
  1.1938 +//
  1.1939 +bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
  1.1940 +       instanceKlassHandle scratch_class, TRAPS) {
  1.1941 +
  1.1942 +  objArrayHandle methods_parameter_annotations(THREAD,
  1.1943 +    scratch_class->methods_parameter_annotations());
  1.1944 +
  1.1945 +  if (methods_parameter_annotations.is_null()
  1.1946 +      || methods_parameter_annotations->length() == 0) {
  1.1947 +    // no methods_parameter_annotations so nothing to do
  1.1948 +    return true;
  1.1949 +  }
  1.1950 +
  1.1951 +  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1952 +    ("methods_parameter_annotations length=%d",
  1.1953 +    methods_parameter_annotations->length()));
  1.1954 +
  1.1955 +  for (int i = 0; i < methods_parameter_annotations->length(); i++) {
  1.1956 +    typeArrayHandle method_parameter_annotations(THREAD,
  1.1957 +      (typeArrayOop)methods_parameter_annotations->obj_at(i));
  1.1958 +    if (method_parameter_annotations.is_null()
  1.1959 +        || method_parameter_annotations->length() == 0) {
  1.1960 +      // this method does not have any parameter annotations so skip it
  1.1961 +      continue;
  1.1962 +    }
  1.1963 +
  1.1964 +    if (method_parameter_annotations->length() < 1) {
  1.1965 +      // not enough room for a num_parameters field
  1.1966 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1967 +        ("length() is too small for a num_parameters field at %d", i));
  1.1968 +      return false;
  1.1969 +    }
  1.1970 +
  1.1971 +    int byte_i = 0;  // byte index into method_parameter_annotations
  1.1972 +
  1.1973 +    u1 num_parameters = method_parameter_annotations->byte_at(byte_i);
  1.1974 +    byte_i++;
  1.1975 +
  1.1976 +    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1977 +      ("num_parameters=%d", num_parameters));
  1.1978 +
  1.1979 +    int calc_num_parameters = 0;
  1.1980 +    for (; calc_num_parameters < num_parameters; calc_num_parameters++) {
  1.1981 +      if (!rewrite_cp_refs_in_annotations_typeArray(
  1.1982 +             method_parameter_annotations, byte_i, THREAD)) {
  1.1983 +        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1984 +          ("bad method_parameter_annotations at %d", calc_num_parameters));
  1.1985 +        // propogate failure back to caller
  1.1986 +        return false;
  1.1987 +      }
  1.1988 +    }
  1.1989 +    assert(num_parameters == calc_num_parameters, "sanity check");
  1.1990 +  }
  1.1991 +
  1.1992 +  return true;
  1.1993 +} // end rewrite_cp_refs_in_methods_parameter_annotations()
  1.1994 +
  1.1995 +
  1.1996 +// Rewrite constant pool references in a methods_default_annotations
  1.1997 +// field. This "structure" is adapted from the AnnotationDefault_attribute
  1.1998 +// that is described in section 4.8.19 of the 2nd-edition of the VM spec:
  1.1999 +//
  1.2000 +// methods_default_annotations_typeArray {
  1.2001 +//   element_value default_value;
  1.2002 +// }
  1.2003 +//
  1.2004 +bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
  1.2005 +       instanceKlassHandle scratch_class, TRAPS) {
  1.2006 +
  1.2007 +  objArrayHandle methods_default_annotations(THREAD,
  1.2008 +    scratch_class->methods_default_annotations());
  1.2009 +
  1.2010 +  if (methods_default_annotations.is_null()
  1.2011 +      || methods_default_annotations->length() == 0) {
  1.2012 +    // no methods_default_annotations so nothing to do
  1.2013 +    return true;
  1.2014 +  }
  1.2015 +
  1.2016 +  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2017 +    ("methods_default_annotations length=%d",
  1.2018 +    methods_default_annotations->length()));
  1.2019 +
  1.2020 +  for (int i = 0; i < methods_default_annotations->length(); i++) {
  1.2021 +    typeArrayHandle method_default_annotations(THREAD,
  1.2022 +      (typeArrayOop)methods_default_annotations->obj_at(i));
  1.2023 +    if (method_default_annotations.is_null()
  1.2024 +        || method_default_annotations->length() == 0) {
  1.2025 +      // this method does not have any default annotations so skip it
  1.2026 +      continue;
  1.2027 +    }
  1.2028 +
  1.2029 +    int byte_i = 0;  // byte index into method_default_annotations
  1.2030 +
  1.2031 +    if (!rewrite_cp_refs_in_element_value(
  1.2032 +           method_default_annotations, byte_i, THREAD)) {
  1.2033 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2034 +        ("bad default element_value at %d", i));
  1.2035 +      // propogate failure back to caller
  1.2036 +      return false;
  1.2037 +    }
  1.2038 +  }
  1.2039 +
  1.2040 +  return true;
  1.2041 +} // end rewrite_cp_refs_in_methods_default_annotations()
  1.2042 +
  1.2043 +
// Rewrite constant pool references in the method's stackmap table.
// These "structures" are adapted from the StackMapTable_attribute that
// is described in section 4.8.4 of the 6.0 version of the VM spec
// (dated 2005.10.26):
// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
//
// stack_map {
//   u2 number_of_entries;
//   stack_map_frame entries[number_of_entries];
// }
//
// The rewrite happens in place: only verification_type_info items can
// contain constant pool indices (handled by
// rewrite_cp_refs_in_verification_type_info()), so every other field
// is just skipped over to keep the walk in sync with the encoding.
void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table(
       methodHandle method, TRAPS) {

  // methods without a stackmap table have nothing to rewrite
  if (!method->has_stackmap_table()) {
    return;
  }

  typeArrayOop stackmap_data = method->stackmap_data();
  address stackmap_p = (address)stackmap_data->byte_at_addr(0);
  address stackmap_end = stackmap_p + stackmap_data->length();

  assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries");
  u2 number_of_entries = Bytes::get_Java_u2(stackmap_p);
  stackmap_p += 2;

  RC_TRACE_WITH_THREAD(0x04000000, THREAD,
    ("number_of_entries=%u", number_of_entries));

  // walk through each stack_map_frame
  u2 calc_number_of_entries = 0;
  for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
    // The stack_map_frame structure is a u1 frame_type followed by
    // 0 or more bytes of data:
    //
    // union stack_map_frame {
    //   same_frame;
    //   same_locals_1_stack_item_frame;
    //   same_locals_1_stack_item_frame_extended;
    //   chop_frame;
    //   same_frame_extended;
    //   append_frame;
    //   full_frame;
    // }

    assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
    // The Linux compiler does not like frame_type to be u1 or u2. It
    // issues the following warning for the first if-statement below:
    //
    // "warning: comparison is always true due to limited range of data type"
    //
    u4 frame_type = *stackmap_p;
    stackmap_p++;

    // same_frame {
    //   u1 frame_type = SAME; /* 0-63 */
    // }
    if (frame_type >= 0 && frame_type <= 63) {
      // nothing more to do for same_frame
    }

    // same_locals_1_stack_item_frame {
    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
    //   verification_type_info stack[1];
    // }
    else if (frame_type >= 64 && frame_type <= 127) {
      // the single stack item may hold a cpool index; advances stackmap_p
      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
        calc_number_of_entries, frame_type, THREAD);
    }

    // reserved for future use
    else if (frame_type >= 128 && frame_type <= 246) {
      // nothing more to do for reserved frame_types
    }

    // same_locals_1_stack_item_frame_extended {
    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
    //   u2 offset_delta;
    //   verification_type_info stack[1];
    // }
    else if (frame_type == 247) {
      // skip offset_delta; it holds no cpool index
      // NOTE(review): no "room for offset_delta" assert here, unlike the
      // append_frame/full_frame cases below — confirm this is intentional
      stackmap_p += 2;
      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
        calc_number_of_entries, frame_type, THREAD);
    }

    // chop_frame {
    //   u1 frame_type = CHOP; /* 248-250 */
    //   u2 offset_delta;
    // }
    else if (frame_type >= 248 && frame_type <= 250) {
      // skip offset_delta; nothing else in a chop_frame
      stackmap_p += 2;
    }

    // same_frame_extended {
    //   u1 frame_type = SAME_FRAME_EXTENDED; /* 251 */
    //   u2 offset_delta;
    // }
    else if (frame_type == 251) {
      // skip offset_delta; nothing else in a same_frame_extended
      stackmap_p += 2;
    }

    // append_frame {
    //   u1 frame_type = APPEND; /* 252-254 */
    //   u2 offset_delta;
    //   verification_type_info locals[frame_type - 251];
    // }
    else if (frame_type >= 252 && frame_type <= 254) {
      assert(stackmap_p + 2 <= stackmap_end,
        "no room for offset_delta");
      stackmap_p += 2;
      // frame_type encodes the number of appended locals (1..3)
      u1 len = frame_type - 251;
      for (u1 i = 0; i < len; i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type, THREAD);
      }
    }

    // full_frame {
    //   u1 frame_type = FULL_FRAME; /* 255 */
    //   u2 offset_delta;
    //   u2 number_of_locals;
    //   verification_type_info locals[number_of_locals];
    //   u2 number_of_stack_items;
    //   verification_type_info stack[number_of_stack_items];
    // }
    else if (frame_type == 255) {
      assert(stackmap_p + 2 + 2 <= stackmap_end,
        "no room for smallest full_frame");
      stackmap_p += 2;

      u2 number_of_locals = Bytes::get_Java_u2(stackmap_p);
      stackmap_p += 2;

      for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type, THREAD);
      }

      // Use the largest size for the number_of_stack_items, but only get
      // the right number of bytes.
      // NOTE(review): number_of_stack_items is read after the locals walk
      // without a fresh bounds assert — confirm the initial assert covers
      // only the smallest full_frame on purpose.
      u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p);
      stackmap_p += 2;

      for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) {
        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
          calc_number_of_entries, frame_type, THREAD);
      }
    }
  } // end while there is a stack_map_frame
  assert(number_of_entries == calc_number_of_entries, "sanity check");
} // end rewrite_cp_refs_in_stack_map_table()
  1.2196 +
  1.2197 +
// Rewrite constant pool references in the verification type info
// portion of the method's stackmap table. These "structures" are
// adapted from the StackMapTable_attribute that is described in
// section 4.8.4 of the 6.0 version of the VM spec (dated 2005.10.26):
// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
//
// The verification_type_info structure is a u1 tag followed by 0 or
// more bytes of data:
//
// union verification_type_info {
//   Top_variable_info;
//   Integer_variable_info;
//   Float_variable_info;
//   Long_variable_info;
//   Double_variable_info;
//   Null_variable_info;
//   UninitializedThis_variable_info;
//   Object_variable_info;
//   Uninitialized_variable_info;
// }
//
// stackmap_p_ref is passed by reference and is advanced past the one
// item that was parsed, so the caller's walk stays in sync. Only the
// ITEM_Object case carries a cpool_index; it is remapped in place via
// find_new_index(). frame_i and frame_type are used for tracing only.
void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info(
       address& stackmap_p_ref, address stackmap_end, u2 frame_i,
       u1 frame_type, TRAPS) {

  assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag");
  u1 tag = *stackmap_p_ref;
  stackmap_p_ref++;

  switch (tag) {
  // Top_variable_info {
  //   u1 tag = ITEM_Top; /* 0 */
  // }
  // verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top
  case 0:  // fall through

  // Integer_variable_info {
  //   u1 tag = ITEM_Integer; /* 1 */
  // }
  case ITEM_Integer:  // fall through

  // Float_variable_info {
  //   u1 tag = ITEM_Float; /* 2 */
  // }
  case ITEM_Float:  // fall through

  // Double_variable_info {
  //   u1 tag = ITEM_Double; /* 3 */
  // }
  case ITEM_Double:  // fall through

  // Long_variable_info {
  //   u1 tag = ITEM_Long; /* 4 */
  // }
  case ITEM_Long:  // fall through

  // Null_variable_info {
  //   u1 tag = ITEM_Null; /* 5 */
  // }
  case ITEM_Null:  // fall through

  // UninitializedThis_variable_info {
  //   u1 tag = ITEM_UninitializedThis; /* 6 */
  // }
  case ITEM_UninitializedThis:
    // nothing more to do for the above tag types: they are the tag
    // byte alone, with no trailing data and no cpool references
    break;

  // Object_variable_info {
  //   u1 tag = ITEM_Object; /* 7 */
  //   u2 cpool_index;
  // }
  case ITEM_Object:
  {
    // the only item kind that references the constant pool
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index");
    u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref);
    u2 new_cp_index = find_new_index(cpool_index);
    if (new_cp_index != 0) {
      // a non-zero mapping means the index moved; patch it in place
      RC_TRACE_WITH_THREAD(0x04000000, THREAD,
        ("mapped old cpool_index=%d", cpool_index));
      Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
      cpool_index = new_cp_index;
    }
    stackmap_p_ref += 2;

    RC_TRACE_WITH_THREAD(0x04000000, THREAD,
      ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i,
      frame_type, cpool_index));
  } break;

  // Uninitialized_variable_info {
  //   u1 tag = ITEM_Uninitialized; /* 8 */
  //   u2 offset;
  // }
  case ITEM_Uninitialized:
    // skip the bytecode offset; it is not a cpool reference
    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
    stackmap_p_ref += 2;
    break;

  default:
    // a malformed table: tags 9+ are not defined by the spec version above
    RC_TRACE_WITH_THREAD(0x04000000, THREAD,
      ("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag));
    ShouldNotReachHere();
    break;
  } // end switch (tag)
} // end rewrite_cp_refs_in_verification_type_info()
  1.2304 +
  1.2305 +
// Change the constant pool associated with klass scratch_class to
// scratch_cp. If shrink is true, then scratch_cp_length elements
// are copied from scratch_cp to a smaller constant pool and the
// smaller constant pool is associated with scratch_class.
//
// After the (possibly shrunken) pool is attached, every constant pool
// index cached outside the pool itself — the field records, the inner
// classes list, and each method (name/signature indices, checked
// exception table, exception table, local variable table, and stackmap
// table) — is remapped via find_new_index(). Throughout this routine a
// find_new_index() result of 0 means "index unchanged, leave it alone".
void VM_RedefineClasses::set_new_constant_pool(
       instanceKlassHandle scratch_class, constantPoolHandle scratch_cp,
       int scratch_cp_length, bool shrink, TRAPS) {
  assert(!shrink || scratch_cp->length() >= scratch_cp_length, "sanity check");

  if (shrink) {
    // scratch_cp is a merged constant pool and has enough space for a
    // worst case merge situation. We want to associate the minimum
    // sized constant pool with the klass to save space.
    constantPoolHandle smaller_cp(THREAD,
      oopFactory::new_constantPool(scratch_cp_length, THREAD));
    // preserve orig_length() value in the smaller copy
    int orig_length = scratch_cp->orig_length();
    assert(orig_length != 0, "sanity check");
    smaller_cp->set_orig_length(orig_length);
    // constant pool slot 0 is unused, so the copy starts at index 1
    scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD);
    scratch_cp = smaller_cp;
  }

  // attach new constant pool to klass
  scratch_cp->set_pool_holder(scratch_class());

  // attach klass to new constant pool
  scratch_class->set_constants(scratch_cp());

  int i;  // for portability

  // Update each field in klass to use new constant pool indices as
  // needed. The fields typeArray is a flat array of shorts; each field
  // occupies next_offset consecutive slots indexed by the *_offset
  // constants below.
  typeArrayHandle fields(THREAD, scratch_class->fields());
  int n_fields = fields->length();
  for (i = 0; i < n_fields; i += instanceKlass::next_offset) {
    jshort cur_index = fields->short_at(i + instanceKlass::name_index_offset);
    jshort new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-name_index change: %d to %d", cur_index, new_index));
      fields->short_at_put(i + instanceKlass::name_index_offset, new_index);
    }
    cur_index = fields->short_at(i + instanceKlass::signature_index_offset);
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-signature_index change: %d to %d", cur_index, new_index));
      fields->short_at_put(i + instanceKlass::signature_index_offset,
        new_index);
    }
    cur_index = fields->short_at(i + instanceKlass::initval_index_offset);
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-initval_index change: %d to %d", cur_index, new_index));
      fields->short_at_put(i + instanceKlass::initval_index_offset, new_index);
    }
    cur_index = fields->short_at(i + instanceKlass::generic_signature_offset);
    new_index = find_new_index(cur_index);
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("field-generic_signature change: %d to %d", cur_index, new_index));
      fields->short_at_put(i + instanceKlass::generic_signature_offset,
        new_index);
    }
  } // end for each field

  // Update constant pool indices in the inner classes info to use
  // new constant indices as needed. The inner classes info is a
  // quadruple:
  // (inner_class_info, outer_class_info, inner_name, inner_access_flags)
  typeArrayOop inner_class_list = scratch_class->inner_classes();
  int icl_length = (inner_class_list == NULL) ? 0 : inner_class_list->length();
  if (icl_length > 0) {
    typeArrayHandle inner_class_list_h(THREAD, inner_class_list);
    for (int i = 0; i < icl_length;
         i += instanceKlass::inner_class_next_offset) {
      int cur_index = inner_class_list_h->ushort_at(i
                        + instanceKlass::inner_class_inner_class_info_offset);
      if (cur_index == 0) {
        continue;  // JVM spec. allows null inner class refs so skip it
      }
      int new_index = find_new_index(cur_index);
      if (new_index != 0) {
        RC_TRACE_WITH_THREAD(0x00080000, THREAD,
          ("inner_class_info change: %d to %d", cur_index, new_index));
        inner_class_list_h->ushort_at_put(i
          + instanceKlass::inner_class_inner_class_info_offset, new_index);
      }
      cur_index = inner_class_list_h->ushort_at(i
                    + instanceKlass::inner_class_outer_class_info_offset);
      new_index = find_new_index(cur_index);
      if (new_index != 0) {
        RC_TRACE_WITH_THREAD(0x00080000, THREAD,
          ("outer_class_info change: %d to %d", cur_index, new_index));
        inner_class_list_h->ushort_at_put(i
          + instanceKlass::inner_class_outer_class_info_offset, new_index);
      }
      cur_index = inner_class_list_h->ushort_at(i
                    + instanceKlass::inner_class_inner_name_offset);
      new_index = find_new_index(cur_index);
      if (new_index != 0) {
        RC_TRACE_WITH_THREAD(0x00080000, THREAD,
          ("inner_name change: %d to %d", cur_index, new_index));
        inner_class_list_h->ushort_at_put(i
          + instanceKlass::inner_class_inner_name_offset, new_index);
      }
    } // end for each inner class
  } // end if we have inner classes

  // Attach each method in klass to the new constant pool and update
  // to use new constant pool indices as needed:
  objArrayHandle methods(THREAD, scratch_class->methods());
  for (i = methods->length() - 1; i >= 0; i--) {
    methodHandle method(THREAD, (methodOop)methods->obj_at(i));
    method->set_constants(scratch_cp());

    int new_index = find_new_index(method->name_index());
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("method-name_index change: %d to %d", method->name_index(),
        new_index));
      method->set_name_index(new_index);
    }
    new_index = find_new_index(method->signature_index());
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("method-signature_index change: %d to %d",
        method->signature_index(), new_index));
      method->set_signature_index(new_index);
    }
    new_index = find_new_index(method->generic_signature_index());
    if (new_index != 0) {
      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
        ("method-generic_signature_index change: %d to %d",
        method->generic_signature_index(), new_index));
      method->set_generic_signature_index(new_index);
    }

    // Update constant pool indices in the method's checked exception
    // table to use new constant indices as needed.
    int cext_length = method->checked_exceptions_length();
    if (cext_length > 0) {
      CheckedExceptionElement * cext_table =
        method->checked_exceptions_start();
      for (int j = 0; j < cext_length; j++) {
        int cur_index = cext_table[j].class_cp_index;
        int new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("cext-class_cp_index change: %d to %d", cur_index, new_index));
          cext_table[j].class_cp_index = (u2)new_index;
        }
      } // end for each checked exception table entry
    } // end if there are checked exception table entries

    // Update each catch type index in the method's exception table
    // to use new constant pool indices as needed. The exception table
    // holds quadruple entries of the form:
    //   (beg_bci, end_bci, handler_bci, klass_index)
    const int beg_bci_offset     = 0;
    const int end_bci_offset     = 1;
    const int handler_bci_offset = 2;
    const int klass_index_offset = 3;
    const int entry_size         = 4;

    typeArrayHandle ex_table (THREAD, method->exception_table());
    int ext_length = ex_table->length();
    assert(ext_length % entry_size == 0, "exception table format has changed");

    for (int j = 0; j < ext_length; j += entry_size) {
      // only the klass_index slot can reference the constant pool
      int cur_index = ex_table->int_at(j + klass_index_offset);
      int new_index = find_new_index(cur_index);
      if (new_index != 0) {
        RC_TRACE_WITH_THREAD(0x00080000, THREAD,
          ("ext-klass_index change: %d to %d", cur_index, new_index));
        ex_table->int_at_put(j + klass_index_offset, new_index);
      }
    } // end for each exception table entry

    // Update constant pool indices in the method's local variable
    // table to use new constant indices as needed. The local variable
    // table hold sextuple entries of the form:
    // (start_pc, length, name_index, descriptor_index, signature_index, slot)
    int lvt_length = method->localvariable_table_length();
    if (lvt_length > 0) {
      LocalVariableTableElement * lv_table =
        method->localvariable_table_start();
      for (int j = 0; j < lvt_length; j++) {
        int cur_index = lv_table[j].name_cp_index;
        int new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("lvt-name_cp_index change: %d to %d", cur_index, new_index));
          lv_table[j].name_cp_index = (u2)new_index;
        }
        cur_index = lv_table[j].descriptor_cp_index;
        new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("lvt-descriptor_cp_index change: %d to %d", cur_index,
            new_index));
          lv_table[j].descriptor_cp_index = (u2)new_index;
        }
        cur_index = lv_table[j].signature_cp_index;
        new_index = find_new_index(cur_index);
        if (new_index != 0) {
          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
            ("lvt-signature_cp_index change: %d to %d", cur_index, new_index));
          lv_table[j].signature_cp_index = (u2)new_index;
        }
      } // end for each local variable table entry
    } // end if there are local variable table entries

    // finally, remap any cpool indices in the method's stackmap table
    rewrite_cp_refs_in_stack_map_table(method, THREAD);
  } // end for each method
} // end set_new_constant_pool()
  1.2523 +
  1.2524 +
// Unevolving classes may point to methods of the_class directly
// from their constant pool caches, itables, and/or vtables. We
// use the SystemDictionary::classes_do() facility and this helper
// to fix up these pointers.
//
// k_oop is the klass being visited; initiating_loader is part of the
// classes_do() callback signature but is not used in this body.
//
// Note: We currently don't support updating the vtable in
// arrayKlassOops. See Open Issues in jvmtiRedefineClasses.hpp.
void VM_RedefineClasses::adjust_cpool_cache_and_vtable(klassOop k_oop,
       oop initiating_loader, TRAPS) {
  Klass *k = k_oop->klass_part();
  // only instance klasses have cpool caches/itables/vtables to fix
  if (k->oop_is_instance()) {
    HandleMark hm(THREAD);
    instanceKlass *ik = (instanceKlass *) k;

    // HotSpot specific optimization! HotSpot does not currently
    // support delegation from the bootstrap class loader to a
    // user-defined class loader. This means that if the bootstrap
    // class loader is the initiating class loader, then it will also
    // be the defining class loader. This also means that classes
    // loaded by the bootstrap class loader cannot refer to classes
    // loaded by a user-defined class loader. Note: a user-defined
    // class loader can delegate to the bootstrap class loader.
    //
    // If the current class being redefined has a user-defined class
    // loader as its defining class loader, then we can skip all
    // classes loaded by the bootstrap class loader.
    bool is_user_defined =
           instanceKlass::cast(_the_class_oop)->class_loader() != NULL;
    if (is_user_defined && ik->class_loader() == NULL) {
      return;
    }

    // This is a very busy routine. We don't want too much tracing
    // printed out.
    bool trace_name_printed = false;

    // Very noisy: only enable this call if you are trying to determine
    // that a specific class gets found by this routine.
    // RC_TRACE macro has an embedded ResourceMark
    // RC_TRACE_WITH_THREAD(0x00100000, THREAD,
    //   ("adjust check: name=%s", ik->external_name()));
    // trace_name_printed = true;

    // Fix the vtable embedded in the_class and subclasses of the_class,
    // if one exists. We discard scratch_class and we don't keep an
    // instanceKlass around to hold obsolete methods so we don't have
    // any other instanceKlass embedded vtables to update. The vtable
    // holds the methodOops for virtual (but not final) methods.
    if (ik->vtable_length() > 0 && ik->is_subtype_of(_the_class_oop)) {
      // ik->vtable() creates a wrapper object; rm cleans it up
      ResourceMark rm(THREAD);
      ik->vtable()->adjust_method_entries(_matching_old_methods,
                                          _matching_new_methods,
                                          _matching_methods_length,
                                          &trace_name_printed);
    }

    // If the current class has an itable and we are either redefining an
    // interface or if the current class is a subclass of the_class, then
    // we potentially have to fix the itable. If we are redefining an
    // interface, then we have to call adjust_method_entries() for
    // every instanceKlass that has an itable since there isn't a
    // subclass relationship between an interface and an instanceKlass.
    if (ik->itable_length() > 0 && (Klass::cast(_the_class_oop)->is_interface()
        || ik->is_subclass_of(_the_class_oop))) {
      // ik->itable() creates a wrapper object; rm cleans it up
      ResourceMark rm(THREAD);
      ik->itable()->adjust_method_entries(_matching_old_methods,
                                          _matching_new_methods,
                                          _matching_methods_length,
                                          &trace_name_printed);
    }

    // The constant pools in other classes (other_cp) can refer to
    // methods in the_class. We have to update method information in
    // other_cp's cache. If other_cp has a previous version, then we
    // have to repeat the process for each previous version. The
    // constant pool cache holds the methodOops for non-virtual
    // methods and for virtual, final methods.
    //
    // Special case: if the current class is the_class, then new_cp
    // has already been attached to the_class and old_cp has already
    // been added as a previous version. The new_cp doesn't have any
    // cached references to old methods so it doesn't need to be
    // updated. We can simply start with the previous version(s) in
    // that case.
    constantPoolHandle other_cp;
    constantPoolCacheOop cp_cache;

    if (k_oop != _the_class_oop) {
      // this klass' constant pool cache may need adjustment
      other_cp = constantPoolHandle(ik->constants());
      cp_cache = other_cp->cache();
      if (cp_cache != NULL) {
        cp_cache->adjust_method_entries(_matching_old_methods,
                                        _matching_new_methods,
                                        _matching_methods_length,
                                        &trace_name_printed);
      }
    }
    {
      ResourceMark rm(THREAD);
      // PreviousVersionInfo objects returned via PreviousVersionWalker
      // contain a GrowableArray of handles. We have to clean up the
      // GrowableArray _after_ the PreviousVersionWalker destructor
      // has destroyed the handles.
      {
        // the previous versions' constant pool caches may need adjustment
        PreviousVersionWalker pvw(ik);
        for (PreviousVersionInfo * pv_info = pvw.next_previous_version();
             pv_info != NULL; pv_info = pvw.next_previous_version()) {
          other_cp = pv_info->prev_constant_pool_handle();
          cp_cache = other_cp->cache();
          if (cp_cache != NULL) {
            cp_cache->adjust_method_entries(_matching_old_methods,
                                            _matching_new_methods,
                                            _matching_methods_length,
                                            &trace_name_printed);
          }
        }
      } // pvw is cleaned up
    } // rm is cleaned up
  }
}
  1.2649 +
  1.2650 +void VM_RedefineClasses::update_jmethod_ids() {
  1.2651 +  for (int j = 0; j < _matching_methods_length; ++j) {
  1.2652 +    methodOop old_method = _matching_old_methods[j];
  1.2653 +    jmethodID jmid = old_method->find_jmethod_id_or_null();
  1.2654 +    if (jmid != NULL) {
  1.2655 +      // There is a jmethodID, change it to point to the new method
  1.2656 +      methodHandle new_method_h(_matching_new_methods[j]);
  1.2657 +      JNIHandles::change_method_associated_with_jmethod_id(jmid, new_method_h);
  1.2658 +      assert(JNIHandles::resolve_jmethod_id(jmid) == _matching_new_methods[j],
  1.2659 +             "should be replaced");
  1.2660 +    }
  1.2661 +  }
  1.2662 +}
  1.2663 +
  1.2664 +void VM_RedefineClasses::check_methods_and_mark_as_obsolete(
  1.2665 +       BitMap *emcp_methods, int * emcp_method_count_p) {
  1.2666 +  *emcp_method_count_p = 0;
  1.2667 +  int obsolete_count = 0;
  1.2668 +  int old_index = 0;
  1.2669 +  for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
  1.2670 +    methodOop old_method = _matching_old_methods[j];
  1.2671 +    methodOop new_method = _matching_new_methods[j];
  1.2672 +    methodOop old_array_method;
  1.2673 +
  1.2674 +    // Maintain an old_index into the _old_methods array by skipping
  1.2675 +    // deleted methods
  1.2676 +    while ((old_array_method = (methodOop) _old_methods->obj_at(old_index))
  1.2677 +                                                            != old_method) {
  1.2678 +      ++old_index;
  1.2679 +    }
  1.2680 +
  1.2681 +    if (MethodComparator::methods_EMCP(old_method, new_method)) {
  1.2682 +      // The EMCP definition from JSR-163 requires the bytecodes to be
  1.2683 +      // the same with the exception of constant pool indices which may
  1.2684 +      // differ. However, the constants referred to by those indices
  1.2685 +      // must be the same.
  1.2686 +      //
  1.2687 +      // We use methods_EMCP() for comparison since constant pool
  1.2688 +      // merging can remove duplicate constant pool entries that were
  1.2689 +      // present in the old method and removed from the rewritten new
  1.2690 +      // method. A faster binary comparison function would consider the
  1.2691 +      // old and new methods to be different when they are actually
  1.2692 +      // EMCP.
  1.2693 +      //
  1.2694 +      // The old and new methods are EMCP and you would think that we
  1.2695 +      // could get rid of one of them here and now and save some space.
  1.2696 +      // However, the concept of EMCP only considers the bytecodes and
  1.2697 +      // the constant pool entries in the comparison. Other things,
  1.2698 +      // e.g., the line number table (LNT) or the local variable table
  1.2699 +      // (LVT) don't count in the comparison. So the new (and EMCP)
  1.2700 +      // method can have a new LNT that we need so we can't just
  1.2701 +      // overwrite the new method with the old method.
  1.2702 +      //
  1.2703 +      // When this routine is called, we have already attached the new
  1.2704 +      // methods to the_class so the old methods are effectively
  1.2705 +      // overwritten. However, if an old method is still executing,
  1.2706 +      // then the old method cannot be collected until sometime after
  1.2707 +      // the old method call has returned. So the overwriting of old
  1.2708 +      // methods by new methods will save us space except for those
  1.2709 +      // (hopefully few) old methods that are still executing.
  1.2710 +      //
  1.2711 +      // A method refers to a constMethodOop and this presents another
  1.2712 +      // possible avenue to space savings. The constMethodOop in the
  1.2713 +      // new method contains possibly new attributes (LNT, LVT, etc).
  1.2714 +      // At first glance, it seems possible to save space by replacing
  1.2715 +      // the constMethodOop in the old method with the constMethodOop
  1.2716 +      // from the new method. The old and new methods would share the
  1.2717 +      // same constMethodOop and we would save the space occupied by
  1.2718 +      // the old constMethodOop. However, the constMethodOop contains
  1.2719 +      // a back reference to the containing method. Sharing the
  1.2720 +      // constMethodOop between two methods could lead to confusion in
  1.2721 +      // the code that uses the back reference. This would lead to
  1.2722 +      // brittle code that could be broken in non-obvious ways now or
  1.2723 +      // in the future.
  1.2724 +      //
  1.2725 +      // Another possibility is to copy the constMethodOop from the new
  1.2726 +      // method to the old method and then overwrite the new method with
  1.2727 +      // the old method. Since the constMethodOop contains the bytecodes
  1.2728 +      // for the method embedded in the oop, this option would change
  1.2729 +      // the bytecodes out from under any threads executing the old
  1.2730 +      // method and make the thread's bcp invalid. Since EMCP requires
  1.2731 +      // that the bytecodes be the same modulo constant pool indices, it
  1.2732 +      // is straight forward to compute the correct new bcp in the new
  1.2733 +      // constMethodOop from the old bcp in the old constMethodOop. The
  1.2734 +      // time consuming part would be searching all the frames in all
  1.2735 +      // of the threads to find all of the calls to the old method.
  1.2736 +      //
  1.2737 +      // It looks like we will have to live with the limited savings
  1.2738 +      // that we get from effectively overwriting the old methods
  1.2739 +      // when the new methods are attached to the_class.
  1.2740 +
  1.2741 +      // track which methods are EMCP for add_previous_version() call
  1.2742 +      emcp_methods->set_bit(old_index);
  1.2743 +      (*emcp_method_count_p)++;
  1.2744 +
  1.2745 +      // An EMCP method is _not_ obsolete. An obsolete method has a
  1.2746 +      // different jmethodID than the current method. An EMCP method
  1.2747 +      // has the same jmethodID as the current method. Having the
  1.2748 +      // same jmethodID for all EMCP versions of a method allows for
  1.2749 +      // a consistent view of the EMCP methods regardless of which
  1.2750 +      // EMCP method you happen to have in hand. For example, a
  1.2751 +      // breakpoint set in one EMCP method will work for all EMCP
  1.2752 +      // versions of the method including the current one.
  1.2753 +    } else {
  1.2754 +      // mark obsolete methods as such
  1.2755 +      old_method->set_is_obsolete();
  1.2756 +      obsolete_count++;
  1.2757 +
  1.2758 +      // obsolete methods need a unique idnum
  1.2759 +      u2 num = instanceKlass::cast(_the_class_oop)->next_method_idnum();
  1.2760 +      if (num != constMethodOopDesc::UNSET_IDNUM) {
  1.2761 +//      u2 old_num = old_method->method_idnum();
  1.2762 +        old_method->set_method_idnum(num);
  1.2763 +// TO DO: attach obsolete annotations to obsolete method's new idnum
  1.2764 +      }
  1.2765 +      // With tracing we try not to "yack" too much. The position of
  1.2766 +      // this trace assumes there are fewer obsolete methods than
  1.2767 +      // EMCP methods.
  1.2768 +      RC_TRACE(0x00000100, ("mark %s(%s) as obsolete",
  1.2769 +        old_method->name()->as_C_string(),
  1.2770 +        old_method->signature()->as_C_string()));
  1.2771 +    }
  1.2772 +    old_method->set_is_old();
  1.2773 +  }
  1.2774 +  for (int i = 0; i < _deleted_methods_length; ++i) {
  1.2775 +    methodOop old_method = _deleted_methods[i];
  1.2776 +
  1.2777 +    assert(old_method->vtable_index() < 0,
  1.2778 +           "cannot delete methods with vtable entries");;
  1.2779 +
  1.2780 +    // Mark all deleted methods as old and obsolete
  1.2781 +    old_method->set_is_old();
  1.2782 +    old_method->set_is_obsolete();
  1.2783 +    ++obsolete_count;
  1.2784 +    // With tracing we try not to "yack" too much. The position of
  1.2785 +    // this trace assumes there are fewer obsolete methods than
  1.2786 +    // EMCP methods.
  1.2787 +    RC_TRACE(0x00000100, ("mark deleted %s(%s) as obsolete",
  1.2788 +                          old_method->name()->as_C_string(),
  1.2789 +                          old_method->signature()->as_C_string()));
  1.2790 +  }
  1.2791 +  assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(),
  1.2792 +    "sanity check");
  1.2793 +  RC_TRACE(0x00000100, ("EMCP_cnt=%d, obsolete_cnt=%d", *emcp_method_count_p,
  1.2794 +    obsolete_count));
  1.2795 +}
  1.2796 +
  1.2797 +// This internal class transfers the native function registration from old methods
  1.2798 +// to new methods.  It is designed to handle both the simple case of unchanged
  1.2799 +// native methods and the complex cases of native method prefixes being added and/or
  1.2800 +// removed.
  1.2801 +// It expects only to be used during the VM_RedefineClasses op (a safepoint).
  1.2802 +//
  1.2803 +// This class is used after the new methods have been installed in "the_class".
  1.2804 +//
  1.2805 +// So, for example, the following must be handled.  Where 'm' is a method and
  1.2806 +// a number followed by an underscore is a prefix.
  1.2807 +//
  1.2808 +//                                      Old Name    New Name
  1.2809 +// Simple transfer to new method        m       ->  m
  1.2810 +// Add prefix                           m       ->  1_m
  1.2811 +// Remove prefix                        1_m     ->  m
  1.2812 +// Simultaneous add of prefixes         m       ->  3_2_1_m
  1.2813 +// Simultaneous removal of prefixes     3_2_1_m ->  m
  1.2814 +// Simultaneous add and remove          1_m     ->  2_m
  1.2815 +// Same, caused by prefix removal only  3_2_1_m ->  3_2_m
  1.2816 +//
class TransferNativeFunctionRegistration {
 private:
  instanceKlassHandle the_class;  // the class whose methods were redefined
  int prefix_count;               // number of agent-requested native method prefixes
  char** prefixes;                // the prefixes themselves, in agent order

  // Recursively search the binary tree of possibly prefixed method names.
  // Iteration could be used if all agents were well behaved. Full tree walk is
  // more resilient to agents not cleaning up intermediate methods.
  // Branch at each depth in the binary tree is:
  //    (1) without the prefix.
  //    (2) with the prefix.
  // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
  // Returns the (possibly prefixed) native method found, or NULL if none.
  methodOop search_prefix_name_space(int depth, char* name_str, size_t name_len,
                                     symbolOop signature) {
    // probe() only looks up an existing symbol; if the symbol was never
    // interned, no method with this name can exist in the class.
    symbolOop name_symbol = SymbolTable::probe(name_str, (int)name_len);
    if (name_symbol != NULL) {
      methodOop method = Klass::cast(the_class())->lookup_method(name_symbol, signature);
      if (method != NULL) {
        // Even if prefixed, intermediate methods must exist.
        if (method->is_native()) {
          // Wahoo, we found a (possibly prefixed) version of the method, return it.
          return method;
        }
        if (depth < prefix_count) {
          // Try applying further prefixes (other than this one).
          method = search_prefix_name_space(depth+1, name_str, name_len, signature);
          if (method != NULL) {
            return method; // found
          }

          // Try adding this prefix to the method name and see if it matches
          // another method name.
          char* prefix = prefixes[depth];
          size_t prefix_len = strlen(prefix);
          size_t trial_len = name_len + prefix_len;
          // Build "<prefix><name>" in a resource-arena buffer (freed with
          // the caller's ResourceMark).
          char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
          strcpy(trial_name_str, prefix);
          strcat(trial_name_str, name_str);
          method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
                                            signature);
          if (method != NULL) {
            // If found along this branch, it was prefixed, mark as such
            method->set_is_prefixed_native();
            return method; // found
          }
        }
      }
    }
    return NULL;  // This whole branch bore nothing
  }

  // Return the method name with old prefixes stripped away.
  // Strips in reverse agent order, mirroring how prefixes were applied.
  char* method_name_without_prefixes(methodOop method) {
    symbolOop name = method->name();
    char* name_str = name->as_utf8();

    // Old prefixing may be defunct, strip prefixes, if any.
    for (int i = prefix_count-1; i >= 0; i--) {
      char* prefix = prefixes[i];
      size_t prefix_len = strlen(prefix);
      if (strncmp(prefix, name_str, prefix_len) == 0) {
        // advance past the matched prefix; no copy needed
        name_str += prefix_len;
      }
    }
    return name_str;
  }

  // Strip any prefixes off the old native method, then try to find a
  // (possibly prefixed) new native that matches it.
  methodOop strip_and_search_for_new_native(methodOop method) {
    ResourceMark rm;
    char* name_str = method_name_without_prefixes(method);
    return search_prefix_name_space(0, name_str, strlen(name_str),
                                    method->signature());
  }

 public:

  // Construct a native method transfer processor for this class.
  // Must run inside the VM_RedefineClasses safepoint.
  TransferNativeFunctionRegistration(instanceKlassHandle _the_class) {
    assert(SafepointSynchronize::is_at_safepoint(), "sanity check");

    the_class = _the_class;
    prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
  }

  // Attempt to transfer any of the old or deleted methods that are native
  void transfer_registrations(methodOop* old_methods, int methods_length) {
    for (int j = 0; j < methods_length; j++) {
      methodOop old_method = old_methods[j];

      // Only methods that actually have a registered native function
      // need their registration moved.
      if (old_method->is_native() && old_method->has_native_function()) {
        methodOop new_method = strip_and_search_for_new_native(old_method);
        if (new_method != NULL) {
          // Actually set the native function in the new method.
          // Redefine does not send events (except CFLH), certainly not this
          // behind the scenes re-registration.
          new_method->set_native_function(old_method->native_function(),
                              !methodOopDesc::native_bind_event_is_interesting);
        }
      }
    }
  }
};
  1.2922 +
  1.2923 +// Don't lose the association between a native method and its JNI function.
  1.2924 +void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle the_class) {
  1.2925 +  TransferNativeFunctionRegistration transfer(the_class);
  1.2926 +  transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
  1.2927 +  transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
  1.2928 +}
  1.2929 +
  1.2930 +// Deoptimize all compiled code that depends on this class.
  1.2931 +//
  1.2932 +// If the can_redefine_classes capability is obtained in the onload
  1.2933 +// phase then the compiler has recorded all dependencies from startup.
  1.2934 +// In that case we need only deoptimize and throw away all compiled code
  1.2935 +// that depends on the class.
  1.2936 +//
  1.2937 +// If can_redefine_classes is obtained sometime after the onload
  1.2938 +// phase then the dependency information may be incomplete. In that case
  1.2939 +// the first call to RedefineClasses causes all compiled code to be
  1.2940 +// thrown away. As can_redefine_classes has been obtained then
  1.2941 +// all future compilations will record dependencies so second and
  1.2942 +// subsequent calls to RedefineClasses need only throw away code
  1.2943 +// that depends on the class.
  1.2944 +//
void VM_RedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) {
  assert_locked_or_safepoint(Compile_lock);

  // All dependencies have been recorded from startup or this is a second or
  // subsequent use of RedefineClasses
  if (JvmtiExport::all_dependencies_are_recorded()) {
    // Precise evolution dependencies exist: deoptimize only the compiled
    // code that actually depends on the class being redefined.
    Universe::flush_evol_dependents_on(k_h);
  } else {
    // Dependency information is incomplete (capability obtained after
    // onload), so conservatively throw away all compiled code.
    CodeCache::mark_all_nmethods_for_deoptimization();

    ResourceMark rm(THREAD);
    DeoptimizationMarker dm;

    // Deoptimize all activations depending on marked nmethods
    Deoptimization::deoptimize_dependents();

    // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies)
    CodeCache::make_marked_nmethods_not_entrant();

    // From now on we know that the dependency information is complete
    JvmtiExport::set_all_dependencies_are_recorded(true);
  }
}
  1.2968 +
  1.2969 +void VM_RedefineClasses::compute_added_deleted_matching_methods() {
  1.2970 +  methodOop old_method;
  1.2971 +  methodOop new_method;
  1.2972 +
  1.2973 +  _matching_old_methods = NEW_RESOURCE_ARRAY(methodOop, _old_methods->length());
  1.2974 +  _matching_new_methods = NEW_RESOURCE_ARRAY(methodOop, _old_methods->length());
  1.2975 +  _added_methods        = NEW_RESOURCE_ARRAY(methodOop, _new_methods->length());
  1.2976 +  _deleted_methods      = NEW_RESOURCE_ARRAY(methodOop, _old_methods->length());
  1.2977 +
  1.2978 +  _matching_methods_length = 0;
  1.2979 +  _deleted_methods_length  = 0;
  1.2980 +  _added_methods_length    = 0;
  1.2981 +
  1.2982 +  int nj = 0;
  1.2983 +  int oj = 0;
  1.2984 +  while (true) {
  1.2985 +    if (oj >= _old_methods->length()) {
  1.2986 +      if (nj >= _new_methods->length()) {
  1.2987 +        break; // we've looked at everything, done
  1.2988 +      }
  1.2989 +      // New method at the end
  1.2990 +      new_method = (methodOop) _new_methods->obj_at(nj);
  1.2991 +      _added_methods[_added_methods_length++] = new_method;
  1.2992 +      ++nj;
  1.2993 +    } else if (nj >= _new_methods->length()) {
  1.2994 +      // Old method, at the end, is deleted
  1.2995 +      old_method = (methodOop) _old_methods->obj_at(oj);
  1.2996 +      _deleted_methods[_deleted_methods_length++] = old_method;
  1.2997 +      ++oj;
  1.2998 +    } else {
  1.2999 +      old_method = (methodOop) _old_methods->obj_at(oj);
  1.3000 +      new_method = (methodOop) _new_methods->obj_at(nj);
  1.3001 +      if (old_method->name() == new_method->name()) {
  1.3002 +        if (old_method->signature() == new_method->signature()) {
  1.3003 +          _matching_old_methods[_matching_methods_length  ] = old_method;
  1.3004 +          _matching_new_methods[_matching_methods_length++] = new_method;
  1.3005 +          ++nj;
  1.3006 +          ++oj;
  1.3007 +        } else {
  1.3008 +          // added overloaded have already been moved to the end,
  1.3009 +          // so this is a deleted overloaded method
  1.3010 +          _deleted_methods[_deleted_methods_length++] = old_method;
  1.3011 +          ++oj;
  1.3012 +        }
  1.3013 +      } else { // names don't match
  1.3014 +        if (old_method->name()->fast_compare(new_method->name()) > 0) {
  1.3015 +          // new method
  1.3016 +          _added_methods[_added_methods_length++] = new_method;
  1.3017 +          ++nj;
  1.3018 +        } else {
  1.3019 +          // deleted method
  1.3020 +          _deleted_methods[_deleted_methods_length++] = old_method;
  1.3021 +          ++oj;
  1.3022 +        }
  1.3023 +      }
  1.3024 +    }
  1.3025 +  }
  1.3026 +  assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity");
  1.3027 +  assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity");
  1.3028 +}
  1.3029 +
  1.3030 +
  1.3031 +
  1.3032 +// Install the redefinition of a class:
  1.3033 +//    - house keeping (flushing breakpoints and caches, deoptimizing
  1.3034 +//      dependent compiled code)
  1.3035 +//    - replacing parts in the_class with parts from scratch_class
  1.3036 +//    - adding a weak reference to track the obsolete but interesting
  1.3037 +//      parts of the_class
  1.3038 +//    - adjusting constant pool caches and vtables in other classes
  1.3039 +//      that refer to methods in the_class. These adjustments use the
  1.3040 +//      SystemDictionary::classes_do() facility which only allows
  1.3041 +//      a helper method to be specified. The interesting parameters
  1.3042 +//      that we would like to pass to the helper method are saved in
  1.3043 +//      static global fields in the VM operation.
  1.3044 +void VM_RedefineClasses::redefine_single_class(jclass the_jclass,
  1.3045 +       instanceKlassHandle scratch_class, TRAPS) {
  1.3046 +
  1.3047 +  RC_TIMER_START(_timer_rsc_phase1);
  1.3048 +
  1.3049 +  oop the_class_mirror = JNIHandles::resolve_non_null(the_jclass);
  1.3050 +  klassOop the_class_oop = java_lang_Class::as_klassOop(the_class_mirror);
  1.3051 +  instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);
  1.3052 +
  1.3053 +#ifndef JVMTI_KERNEL
  1.3054 +  // Remove all breakpoints in methods of this class
  1.3055 +  JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
  1.3056 +  jvmti_breakpoints.clearall_in_class_at_safepoint(the_class_oop);
  1.3057 +#endif // !JVMTI_KERNEL
  1.3058 +
  1.3059 +  if (the_class_oop == Universe::reflect_invoke_cache()->klass()) {
  1.3060 +    // We are redefining java.lang.reflect.Method. Method.invoke() is
  1.3061 +    // cached and users of the cache care about each active version of
  1.3062 +    // the method so we have to track this previous version.
  1.3063 +    // Do this before methods get switched
  1.3064 +    Universe::reflect_invoke_cache()->add_previous_version(
  1.3065 +      the_class->method_with_idnum(Universe::reflect_invoke_cache()->method_idnum()));
  1.3066 +  }
  1.3067 +
  1.3068 +  // Deoptimize all compiled code that depends on this class
  1.3069 +  flush_dependent_code(the_class, THREAD);
  1.3070 +
  1.3071 +  _old_methods = the_class->methods();
  1.3072 +  _new_methods = scratch_class->methods();
  1.3073 +  _the_class_oop = the_class_oop;
  1.3074 +  compute_added_deleted_matching_methods();
  1.3075 +  update_jmethod_ids();
  1.3076 +
  1.3077 +  // Attach new constant pool to the original klass. The original
  1.3078 +  // klass still refers to the old constant pool (for now).
  1.3079 +  scratch_class->constants()->set_pool_holder(the_class());
  1.3080 +
  1.3081 +#if 0
  1.3082 +  // In theory, with constant pool merging in place we should be able
  1.3083 +  // to save space by using the new, merged constant pool in place of
  1.3084 +  // the old constant pool(s). By "pool(s)" I mean the constant pool in
  1.3085 +  // the klass version we are replacing now and any constant pool(s) in
  1.3086 +  // previous versions of klass. Nice theory, doesn't work in practice.
  1.3087 +  // When this code is enabled, even simple programs throw NullPointer
  1.3088 +  // exceptions. I'm guessing that this is caused by some constant pool
  1.3089 +  // cache difference between the new, merged constant pool and the
  1.3090 +  // constant pool that was just being used by the klass. I'm keeping
  1.3091 +  // this code around to archive the idea, but the code has to remain
  1.3092 +  // disabled for now.
  1.3093 +
  1.3094 +  // Attach each old method to the new constant pool. This can be
  1.3095 +  // done here since we are past the bytecode verification and
  1.3096 +  // constant pool optimization phases.
  1.3097 +  for (int i = _old_methods->length() - 1; i >= 0; i--) {
  1.3098 +    methodOop method = (methodOop)_old_methods->obj_at(i);
  1.3099 +    method->set_constants(scratch_class->constants());
  1.3100 +  }
  1.3101 +
  1.3102 +  {
  1.3103 +    // walk all previous versions of the klass
  1.3104 +    instanceKlass *ik = (instanceKlass *)the_class()->klass_part();
  1.3105 +    PreviousVersionWalker pvw(ik);
  1.3106 +    instanceKlassHandle ikh;
  1.3107 +    do {
  1.3108 +      ikh = pvw.next_previous_version();
  1.3109 +      if (!ikh.is_null()) {
  1.3110 +        ik = ikh();
  1.3111 +
  1.3112 +        // attach previous version of klass to the new constant pool
  1.3113 +        ik->set_constants(scratch_class->constants());
  1.3114 +
  1.3115 +        // Attach each method in the previous version of klass to the
  1.3116 +        // new constant pool
  1.3117 +        objArrayOop prev_methods = ik->methods();
  1.3118 +        for (int i = prev_methods->length() - 1; i >= 0; i--) {
  1.3119 +          methodOop method = (methodOop)prev_methods->obj_at(i);
  1.3120 +          method->set_constants(scratch_class->constants());
  1.3121 +        }
  1.3122 +      }
  1.3123 +    } while (!ikh.is_null());
  1.3124 +  }
  1.3125 +#endif
  1.3126 +
  1.3127 +  // Replace methods and constantpool
  1.3128 +  the_class->set_methods(_new_methods);
  1.3129 +  scratch_class->set_methods(_old_methods);     // To prevent potential GCing of the old methods,
  1.3130 +                                          // and to be able to undo operation easily.
  1.3131 +
  1.3132 +  constantPoolOop old_constants = the_class->constants();
  1.3133 +  the_class->set_constants(scratch_class->constants());
  1.3134 +  scratch_class->set_constants(old_constants);  // See the previous comment.
  1.3135 +#if 0
  1.3136 +  // We are swapping the guts of "the new class" with the guts of "the
  1.3137 +  // class". Since the old constant pool has just been attached to "the
  1.3138 +  // new class", it seems logical to set the pool holder in the old
  1.3139 +  // constant pool also. However, doing this will change the observable
  1.3140 +  // class hierarchy for any old methods that are still executing. A
  1.3141 +  // method can query the identity of its "holder" and this query uses
  1.3142 +  // the method's constant pool link to find the holder. The change in
  1.3143 +  // holding class from "the class" to "the new class" can confuse
  1.3144 +  // things.
  1.3145 +  //
  1.3146 +  // Setting the old constant pool's holder will also cause
  1.3147 +  // verification done during vtable initialization below to fail.
  1.3148 +  // During vtable initialization, the vtable's class is verified to be
  1.3149 +  // a subtype of the method's holder. The vtable's class is "the
  1.3150 +  // class" and the method's holder is gotten from the constant pool
  1.3151 +  // link in the method itself. For "the class"'s directly implemented
  1.3152 +  // methods, the method holder is "the class" itself (as gotten from
  1.3153 +  // the new constant pool). The check works fine in this case. The
  1.3154 +  // check also works fine for methods inherited from super classes.
  1.3155 +  //
  1.3156 +  // Miranda methods are a little more complicated. A miranda method is
  1.3157 +  // provided by an interface when the class implementing the interface
  1.3158 +  // does not provide its own method.  These interfaces are implemented
  1.3159 +  // internally as an instanceKlass. These special instanceKlasses
  1.3160 +  // share the constant pool of the class that "implements" the
  1.3161 +  // interface. By sharing the constant pool, the method holder of a
  1.3162 +  // miranda method is the class that "implements" the interface. In a
  1.3163 +  // non-redefine situation, the subtype check works fine. However, if
  1.3164 +  // the old constant pool's pool holder is modified, then the check
  1.3165 +  // fails because there is no class hierarchy relationship between the
  1.3166 +  // vtable's class and "the new class".
  1.3167 +
  1.3168 +  old_constants->set_pool_holder(scratch_class());
  1.3169 +#endif
  1.3170 +
  1.3171 +  // track which methods are EMCP for add_previous_version() call below
  1.3172 +  BitMap emcp_methods(_old_methods->length());
  1.3173 +  int emcp_method_count = 0;
  1.3174 +  emcp_methods.clear();  // clears 0..(length() - 1)
  1.3175 +  check_methods_and_mark_as_obsolete(&emcp_methods, &emcp_method_count);
  1.3176 +  transfer_old_native_function_registrations(the_class);
  1.3177 +
  1.3178 +  // The class file bytes from before any retransformable agents mucked
  1.3179 +  // with them was cached on the scratch class, move to the_class.
  1.3180 +  // Note: we still want to do this if nothing needed caching since it
  1.3181 +  // should get cleared in the_class too.
  1.3182 +  the_class->set_cached_class_file(scratch_class->get_cached_class_file_bytes(),
  1.3183 +                                   scratch_class->get_cached_class_file_len());
  1.3184 +
  1.3185 +  // Replace inner_classes
  1.3186 +  typeArrayOop old_inner_classes = the_class->inner_classes();
  1.3187 +  the_class->set_inner_classes(scratch_class->inner_classes());
  1.3188 +  scratch_class->set_inner_classes(old_inner_classes);
  1.3189 +
  1.3190 +  // Initialize the vtable and interface table after
  1.3191 +  // methods have been rewritten
  1.3192 +  {
  1.3193 +    ResourceMark rm(THREAD);
  1.3194 +    // no exception should happen here since we explicitly
  1.3195 +    // do not check loader constraints.
  1.3196 +    // compare_and_normalize_class_versions has already checked:
  1.3197 +    //  - classloaders unchanged, signatures unchanged
  1.3198 +    //  - all instanceKlasses for redefined classes reused & contents updated
  1.3199 +    the_class->vtable()->initialize_vtable(false, THREAD);
  1.3200 +    the_class->itable()->initialize_itable(false, THREAD);
  1.3201 +    assert(!HAS_PENDING_EXCEPTION || (THREAD->pending_exception()->is_a(SystemDictionary::threaddeath_klass())), "redefine exception");
  1.3202 +  }
  1.3203 +
  1.3204 +  // Leave arrays of jmethodIDs and itable index cache unchanged
  1.3205 +
  1.3206 +  // Copy the "source file name" attribute from new class version
  1.3207 +  the_class->set_source_file_name(scratch_class->source_file_name());
  1.3208 +
  1.3209 +  // Copy the "source debug extension" attribute from new class version
  1.3210 +  the_class->set_source_debug_extension(
  1.3211 +    scratch_class->source_debug_extension());
  1.3212 +
  1.3213 +  // Use of javac -g could be different in the old and the new
  1.3214 +  if (scratch_class->access_flags().has_localvariable_table() !=
  1.3215 +      the_class->access_flags().has_localvariable_table()) {
  1.3216 +
  1.3217 +    AccessFlags flags = the_class->access_flags();
  1.3218 +    if (scratch_class->access_flags().has_localvariable_table()) {
  1.3219 +      flags.set_has_localvariable_table();
  1.3220 +    } else {
  1.3221 +      flags.clear_has_localvariable_table();
  1.3222 +    }
  1.3223 +    the_class->set_access_flags(flags);
  1.3224 +  }
  1.3225 +
  1.3226 +  // Replace class annotation fields values
  1.3227 +  typeArrayOop old_class_annotations = the_class->class_annotations();
  1.3228 +  the_class->set_class_annotations(scratch_class->class_annotations());
  1.3229 +  scratch_class->set_class_annotations(old_class_annotations);
  1.3230 +
  1.3231 +  // Replace fields annotation fields values
  1.3232 +  objArrayOop old_fields_annotations = the_class->fields_annotations();
  1.3233 +  the_class->set_fields_annotations(scratch_class->fields_annotations());
  1.3234 +  scratch_class->set_fields_annotations(old_fields_annotations);
  1.3235 +
  1.3236 +  // Replace methods annotation fields values
  1.3237 +  objArrayOop old_methods_annotations = the_class->methods_annotations();
  1.3238 +  the_class->set_methods_annotations(scratch_class->methods_annotations());
  1.3239 +  scratch_class->set_methods_annotations(old_methods_annotations);
  1.3240 +
  1.3241 +  // Replace methods parameter annotation fields values
  1.3242 +  objArrayOop old_methods_parameter_annotations =
  1.3243 +    the_class->methods_parameter_annotations();
  1.3244 +  the_class->set_methods_parameter_annotations(
  1.3245 +    scratch_class->methods_parameter_annotations());
  1.3246 +  scratch_class->set_methods_parameter_annotations(old_methods_parameter_annotations);
  1.3247 +
  1.3248 +  // Replace methods default annotation fields values
  1.3249 +  objArrayOop old_methods_default_annotations =
  1.3250 +    the_class->methods_default_annotations();
  1.3251 +  the_class->set_methods_default_annotations(
  1.3252 +    scratch_class->methods_default_annotations());
  1.3253 +  scratch_class->set_methods_default_annotations(old_methods_default_annotations);
  1.3254 +
  1.3255 +  // Replace minor version number of class file
  1.3256 +  u2 old_minor_version = the_class->minor_version();
  1.3257 +  the_class->set_minor_version(scratch_class->minor_version());
  1.3258 +  scratch_class->set_minor_version(old_minor_version);
  1.3259 +
  1.3260 +  // Replace major version number of class file
  1.3261 +  u2 old_major_version = the_class->major_version();
  1.3262 +  the_class->set_major_version(scratch_class->major_version());
  1.3263 +  scratch_class->set_major_version(old_major_version);
  1.3264 +
  1.3265 +  // Replace CP indexes for class and name+type of enclosing method
  1.3266 +  u2 old_class_idx  = the_class->enclosing_method_class_index();
  1.3267 +  u2 old_method_idx = the_class->enclosing_method_method_index();
  1.3268 +  the_class->set_enclosing_method_indices(
  1.3269 +    scratch_class->enclosing_method_class_index(),
  1.3270 +    scratch_class->enclosing_method_method_index());
  1.3271 +  scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx);
  1.3272 +
  1.3273 +  // keep track of previous versions of this class
  1.3274 +  the_class->add_previous_version(scratch_class, &emcp_methods,
  1.3275 +    emcp_method_count);
  1.3276 +
  1.3277 +  RC_TIMER_STOP(_timer_rsc_phase1);
  1.3278 +  RC_TIMER_START(_timer_rsc_phase2);
  1.3279 +
  1.3280 +  // Adjust constantpool caches and vtables for all classes
  1.3281 +  // that reference methods of the evolved class.
  1.3282 +  SystemDictionary::classes_do(adjust_cpool_cache_and_vtable, THREAD);
  1.3283 +
  1.3284 +  if (the_class->oop_map_cache() != NULL) {
  1.3285 +    // Flush references to any obsolete methods from the oop map cache
  1.3286 +    // so that obsolete methods are not pinned.
  1.3287 +    the_class->oop_map_cache()->flush_obsolete_entries();
  1.3288 +  }
  1.3289 +
  1.3290 +  // increment the classRedefinedCount field in the_class and in any
  1.3291 +  // direct and indirect subclasses of the_class
  1.3292 +  increment_class_counter((instanceKlass *)the_class()->klass_part(), THREAD);
  1.3293 +
  1.3294 +  // RC_TRACE macro has an embedded ResourceMark
  1.3295 +  RC_TRACE_WITH_THREAD(0x00000001, THREAD,
  1.3296 +    ("redefined name=%s, count=%d (avail_mem=" UINT64_FORMAT "K)",
  1.3297 +    the_class->external_name(),
  1.3298 +    java_lang_Class::classRedefinedCount(the_class_mirror),
  1.3299 +    os::available_memory() >> 10));
  1.3300 +
  1.3301 +  RC_TIMER_STOP(_timer_rsc_phase2);
  1.3302 +} // end redefine_single_class()
  1.3303 +
  1.3304 +
  1.3305 +// Increment the classRedefinedCount field in the specific instanceKlass
  1.3306 +// and in all direct and indirect subclasses.
  1.3307 +void VM_RedefineClasses::increment_class_counter(instanceKlass *ik, TRAPS) {
  1.3308 +  oop class_mirror = ik->java_mirror();
  1.3309 +  klassOop class_oop = java_lang_Class::as_klassOop(class_mirror);
  1.3310 +  int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
  1.3311 +  java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
  1.3312 +
  1.3313 +  if (class_oop != _the_class_oop) {
  1.3314 +    // _the_class_oop count is printed at end of redefine_single_class()
  1.3315 +    RC_TRACE_WITH_THREAD(0x00000008, THREAD,
  1.3316 +      ("updated count in subclass=%s to %d", ik->external_name(), new_count));
  1.3317 +  }
  1.3318 +
  1.3319 +  for (Klass *subk = ik->subklass(); subk != NULL;
  1.3320 +       subk = subk->next_sibling()) {
  1.3321 +    klassOop sub = subk->as_klassOop();
  1.3322 +    instanceKlass *subik = (instanceKlass *)sub->klass_part();
  1.3323 +
  1.3324 +    // recursively do subclasses of the current subclass
  1.3325 +    increment_class_counter(subik, THREAD);
  1.3326 +  }
  1.3327 +}
  1.3328 +
  1.3329 +#ifndef PRODUCT
  1.3330 +void VM_RedefineClasses::check_class(klassOop k_oop,
  1.3331 +       oop initiating_loader, TRAPS) {
  1.3332 +  Klass *k = k_oop->klass_part();
  1.3333 +  if (k->oop_is_instance()) {
  1.3334 +    HandleMark hm(THREAD);
  1.3335 +    instanceKlass *ik = (instanceKlass *) k;
  1.3336 +
  1.3337 +    if (ik->vtable_length() > 0) {
  1.3338 +      ResourceMark rm(THREAD);
  1.3339 +      if (!ik->vtable()->check_no_old_entries()) {
  1.3340 +        tty->print_cr("klassVtable::check_no_old_entries failure -- OLD method found -- class: %s", ik->signature_name());
  1.3341 +        ik->vtable()->dump_vtable();
  1.3342 +        dump_methods();
  1.3343 +        assert(false, "OLD method found");
  1.3344 +      }
  1.3345 +    }
  1.3346 +  }
  1.3347 +}
  1.3348 +
  1.3349 +void VM_RedefineClasses::dump_methods() {
  1.3350 +        int j;
  1.3351 +        tty->print_cr("_old_methods --");
  1.3352 +        for (j = 0; j < _old_methods->length(); ++j) {
  1.3353 +          methodOop m = (methodOop) _old_methods->obj_at(j);
  1.3354 +          tty->print("%4d  (%5d)  ", j, m->vtable_index());
  1.3355 +          m->access_flags().print_on(tty);
  1.3356 +          tty->print(" --  ");
  1.3357 +          m->print_name(tty);
  1.3358 +          tty->cr();
  1.3359 +        }
  1.3360 +        tty->print_cr("_new_methods --");
  1.3361 +        for (j = 0; j < _new_methods->length(); ++j) {
  1.3362 +          methodOop m = (methodOop) _new_methods->obj_at(j);
  1.3363 +          tty->print("%4d  (%5d)  ", j, m->vtable_index());
  1.3364 +          m->access_flags().print_on(tty);
  1.3365 +          tty->print(" --  ");
  1.3366 +          m->print_name(tty);
  1.3367 +          tty->cr();
  1.3368 +        }
  1.3369 +        tty->print_cr("_matching_(old/new)_methods --");
  1.3370 +        for (j = 0; j < _matching_methods_length; ++j) {
  1.3371 +          methodOop m = _matching_old_methods[j];
  1.3372 +          tty->print("%4d  (%5d)  ", j, m->vtable_index());
  1.3373 +          m->access_flags().print_on(tty);
  1.3374 +          tty->print(" --  ");
  1.3375 +          m->print_name(tty);
  1.3376 +          tty->cr();
  1.3377 +          m = _matching_new_methods[j];
  1.3378 +          tty->print("      (%5d)  ", m->vtable_index());
  1.3379 +          m->access_flags().print_on(tty);
  1.3380 +          tty->cr();
  1.3381 +        }
  1.3382 +        tty->print_cr("_deleted_methods --");
  1.3383 +        for (j = 0; j < _deleted_methods_length; ++j) {
  1.3384 +          methodOop m = _deleted_methods[j];
  1.3385 +          tty->print("%4d  (%5d)  ", j, m->vtable_index());
  1.3386 +          m->access_flags().print_on(tty);
  1.3387 +          tty->print(" --  ");
  1.3388 +          m->print_name(tty);
  1.3389 +          tty->cr();
  1.3390 +        }
  1.3391 +        tty->print_cr("_added_methods --");
  1.3392 +        for (j = 0; j < _added_methods_length; ++j) {
  1.3393 +          methodOop m = _added_methods[j];
  1.3394 +          tty->print("%4d  (%5d)  ", j, m->vtable_index());
  1.3395 +          m->access_flags().print_on(tty);
  1.3396 +          tty->print(" --  ");
  1.3397 +          m->print_name(tty);
  1.3398 +          tty->cr();
  1.3399 +        }
  1.3400 +}
  1.3401 +#endif

mercurial