src/share/vm/prims/jvmtiRedefineClasses.cpp

changeset 0:f90c822e73f8
child 6876:710a3c8b516e
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp	Wed Apr 27 01:25:04 2016 +0800
     1.3 @@ -0,0 +1,3658 @@
     1.4 +/*
     1.5 + * Copyright (c) 2003, 2014, Oracle and/or its affiliates. All rights reserved.
     1.6 + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     1.7 + *
     1.8 + * This code is free software; you can redistribute it and/or modify it
     1.9 + * under the terms of the GNU General Public License version 2 only, as
    1.10 + * published by the Free Software Foundation.
    1.11 + *
    1.12 + * This code is distributed in the hope that it will be useful, but WITHOUT
    1.13 + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    1.14 + * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    1.15 + * version 2 for more details (a copy is included in the LICENSE file that
    1.16 + * accompanied this code).
    1.17 + *
    1.18 + * You should have received a copy of the GNU General Public License version
    1.19 + * 2 along with this work; if not, write to the Free Software Foundation,
    1.20 + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    1.21 + *
    1.22 + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
    1.23 + * or visit www.oracle.com if you need additional information or have any
    1.24 + * questions.
    1.25 + *
    1.26 + */
    1.27 +
    1.28 +#include "precompiled.hpp"
    1.29 +#include "classfile/metadataOnStackMark.hpp"
    1.30 +#include "classfile/systemDictionary.hpp"
    1.31 +#include "classfile/verifier.hpp"
    1.32 +#include "code/codeCache.hpp"
    1.33 +#include "compiler/compileBroker.hpp"
    1.34 +#include "interpreter/oopMapCache.hpp"
    1.35 +#include "interpreter/rewriter.hpp"
    1.36 +#include "memory/gcLocker.hpp"
    1.37 +#include "memory/metadataFactory.hpp"
    1.38 +#include "memory/metaspaceShared.hpp"
    1.39 +#include "memory/universe.inline.hpp"
    1.40 +#include "oops/fieldStreams.hpp"
    1.41 +#include "oops/klassVtable.hpp"
    1.42 +#include "prims/jvmtiImpl.hpp"
    1.43 +#include "prims/jvmtiRedefineClasses.hpp"
    1.44 +#include "prims/methodComparator.hpp"
    1.45 +#include "runtime/deoptimization.hpp"
    1.46 +#include "runtime/relocator.hpp"
    1.47 +#include "utilities/bitMap.inline.hpp"
    1.48 +
    1.49 +PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
    1.50 +
    1.51 +Array<Method*>* VM_RedefineClasses::_old_methods = NULL;
    1.52 +Array<Method*>* VM_RedefineClasses::_new_methods = NULL;
    1.53 +Method**  VM_RedefineClasses::_matching_old_methods = NULL;
    1.54 +Method**  VM_RedefineClasses::_matching_new_methods = NULL;
    1.55 +Method**  VM_RedefineClasses::_deleted_methods      = NULL;
    1.56 +Method**  VM_RedefineClasses::_added_methods        = NULL;
    1.57 +int         VM_RedefineClasses::_matching_methods_length = 0;
    1.58 +int         VM_RedefineClasses::_deleted_methods_length  = 0;
    1.59 +int         VM_RedefineClasses::_added_methods_length    = 0;
    1.60 +Klass*      VM_RedefineClasses::_the_class_oop = NULL;
    1.61 +
    1.62 +
    1.63 +VM_RedefineClasses::VM_RedefineClasses(jint class_count,
    1.64 +                                       const jvmtiClassDefinition *class_defs,
    1.65 +                                       JvmtiClassLoadKind class_load_kind) {
    1.66 +  _class_count = class_count;
    1.67 +  _class_defs = class_defs;
    1.68 +  _class_load_kind = class_load_kind;
    1.69 +  _res = JVMTI_ERROR_NONE;
    1.70 +}
    1.71 +
    1.72 +bool VM_RedefineClasses::doit_prologue() {
    1.73 +  if (_class_count == 0) {
    1.74 +    _res = JVMTI_ERROR_NONE;
    1.75 +    return false;
    1.76 +  }
    1.77 +  if (_class_defs == NULL) {
    1.78 +    _res = JVMTI_ERROR_NULL_POINTER;
    1.79 +    return false;
    1.80 +  }
    1.81 +  for (int i = 0; i < _class_count; i++) {
    1.82 +    if (_class_defs[i].klass == NULL) {
    1.83 +      _res = JVMTI_ERROR_INVALID_CLASS;
    1.84 +      return false;
    1.85 +    }
    1.86 +    if (_class_defs[i].class_byte_count == 0) {
    1.87 +      _res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
    1.88 +      return false;
    1.89 +    }
    1.90 +    if (_class_defs[i].class_bytes == NULL) {
    1.91 +      _res = JVMTI_ERROR_NULL_POINTER;
    1.92 +      return false;
    1.93 +    }
    1.94 +  }
    1.95 +
    1.96 +  // Start timer after all the sanity checks; not quite accurate, but
    1.97 +  // better than adding a bunch of stop() calls.
    1.98 +  RC_TIMER_START(_timer_vm_op_prologue);
    1.99 +
   1.100 +  // We first load new class versions in the prologue, because somewhere down the
   1.101 +  // call chain it is required that the current thread is a Java thread.
   1.102 +  _res = load_new_class_versions(Thread::current());
   1.103 +  if (_res != JVMTI_ERROR_NONE) {
   1.104 +    // free any successfully created classes, since none are redefined
   1.105 +    for (int i = 0; i < _class_count; i++) {
   1.106 +      if (_scratch_classes[i] != NULL) {
   1.107 +        ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
   1.108 +        // Free the memory for this class at class unloading time.  Not before
   1.109 +        // because CMS might think this is still live.
   1.110 +        cld->add_to_deallocate_list((InstanceKlass*)_scratch_classes[i]);
   1.111 +      }
   1.112 +    }
    1.113 +    // Free os::malloc allocated memory in load_new_class_versions().
   1.114 +    os::free(_scratch_classes);
   1.115 +    RC_TIMER_STOP(_timer_vm_op_prologue);
   1.116 +    return false;
   1.117 +  }
   1.118 +
   1.119 +  RC_TIMER_STOP(_timer_vm_op_prologue);
   1.120 +  return true;
   1.121 +}
   1.122 +
   1.123 +void VM_RedefineClasses::doit() {
   1.124 +  Thread *thread = Thread::current();
   1.125 +
   1.126 +  if (UseSharedSpaces) {
   1.127 +    // Sharing is enabled so we remap the shared readonly space to
   1.128 +    // shared readwrite, private just in case we need to redefine
   1.129 +    // a shared class. We do the remap during the doit() phase of
   1.130 +    // the safepoint to be safer.
   1.131 +    if (!MetaspaceShared::remap_shared_readonly_as_readwrite()) {
   1.132 +      RC_TRACE_WITH_THREAD(0x00000001, thread,
   1.133 +        ("failed to remap shared readonly space to readwrite, private"));
   1.134 +      _res = JVMTI_ERROR_INTERNAL;
   1.135 +      return;
   1.136 +    }
   1.137 +  }
   1.138 +
   1.139 +  // Mark methods seen on stack and everywhere else so old methods are not
   1.140 +  // cleaned up if they're on the stack.
   1.141 +  MetadataOnStackMark md_on_stack;
    1.142 +  HandleMark hm(thread);   // make sure any handles created are deleted
    1.143 +                           // before the next stack walk.
   1.144 +
   1.145 +  for (int i = 0; i < _class_count; i++) {
   1.146 +    redefine_single_class(_class_defs[i].klass, _scratch_classes[i], thread);
   1.147 +    ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
   1.148 +    // Free the memory for this class at class unloading time.  Not before
   1.149 +    // because CMS might think this is still live.
   1.150 +    cld->add_to_deallocate_list((InstanceKlass*)_scratch_classes[i]);
   1.151 +    _scratch_classes[i] = NULL;
   1.152 +  }
   1.153 +
   1.154 +  // Disable any dependent concurrent compilations
   1.155 +  SystemDictionary::notice_modification();
   1.156 +
   1.157 +  // Set flag indicating that some invariants are no longer true.
   1.158 +  // See jvmtiExport.hpp for detailed explanation.
   1.159 +  JvmtiExport::set_has_redefined_a_class();
   1.160 +
   1.161 +// check_class() is optionally called for product bits, but is
   1.162 +// always called for non-product bits.
   1.163 +#ifdef PRODUCT
   1.164 +  if (RC_TRACE_ENABLED(0x00004000)) {
   1.165 +#endif
   1.166 +    RC_TRACE_WITH_THREAD(0x00004000, thread, ("calling check_class"));
   1.167 +    CheckClass check_class(thread);
   1.168 +    ClassLoaderDataGraph::classes_do(&check_class);
   1.169 +#ifdef PRODUCT
   1.170 +  }
   1.171 +#endif
   1.172 +}
   1.173 +
   1.174 +void VM_RedefineClasses::doit_epilogue() {
   1.175 +  // Free os::malloc allocated memory.
   1.176 +  os::free(_scratch_classes);
   1.177 +
   1.178 +  if (RC_TRACE_ENABLED(0x00000004)) {
   1.179 +    // Used to have separate timers for "doit" and "all", but the timer
   1.180 +    // overhead skewed the measurements.
   1.181 +    jlong doit_time = _timer_rsc_phase1.milliseconds() +
   1.182 +                      _timer_rsc_phase2.milliseconds();
   1.183 +    jlong all_time = _timer_vm_op_prologue.milliseconds() + doit_time;
   1.184 +
   1.185 +    RC_TRACE(0x00000004, ("vm_op: all=" UINT64_FORMAT
   1.186 +      "  prologue=" UINT64_FORMAT "  doit=" UINT64_FORMAT, all_time,
   1.187 +      _timer_vm_op_prologue.milliseconds(), doit_time));
   1.188 +    RC_TRACE(0x00000004,
   1.189 +      ("redefine_single_class: phase1=" UINT64_FORMAT "  phase2=" UINT64_FORMAT,
   1.190 +       _timer_rsc_phase1.milliseconds(), _timer_rsc_phase2.milliseconds()));
   1.191 +  }
   1.192 +}
   1.193 +
   1.194 +bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
   1.195 +  // classes for primitives cannot be redefined
   1.196 +  if (java_lang_Class::is_primitive(klass_mirror)) {
   1.197 +    return false;
   1.198 +  }
   1.199 +  Klass* the_class_oop = java_lang_Class::as_Klass(klass_mirror);
   1.200 +  // classes for arrays cannot be redefined
   1.201 +  if (the_class_oop == NULL || !the_class_oop->oop_is_instance()) {
   1.202 +    return false;
   1.203 +  }
   1.204 +  return true;
   1.205 +}
   1.206 +
   1.207 +// Append the current entry at scratch_i in scratch_cp to *merge_cp_p
   1.208 +// where the end of *merge_cp_p is specified by *merge_cp_length_p. For
   1.209 +// direct CP entries, there is just the current entry to append. For
   1.210 +// indirect and double-indirect CP entries, there are zero or more
   1.211 +// referenced CP entries along with the current entry to append.
   1.212 +// Indirect and double-indirect CP entries are handled by recursive
   1.213 +// calls to append_entry() as needed. The referenced CP entries are
   1.214 +// always appended to *merge_cp_p before the referee CP entry. These
   1.215 +// referenced CP entries may already exist in *merge_cp_p in which case
   1.216 +// there is nothing extra to append and only the current entry is
   1.217 +// appended.
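          +//
          +// For example (illustrative): a Methodref entry is double-indirect --
          +// it references a Class entry and a NameAndType entry, and the
          +// NameAndType in turn references two Utf8 entries. Appending a
          +// Methodref therefore finds-or-appends the Class, the Utf8s and the
          +// NameAndType in *merge_cp_p before the Methodref itself.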
   1.218 +void VM_RedefineClasses::append_entry(constantPoolHandle scratch_cp,
   1.219 +       int scratch_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p,
   1.220 +       TRAPS) {
   1.221 +
   1.222 +  // append is different depending on entry tag type
   1.223 +  switch (scratch_cp->tag_at(scratch_i).value()) {
   1.224 +
   1.225 +    // The old verifier is implemented outside the VM. It loads classes,
   1.226 +    // but does not resolve constant pool entries directly so we never
   1.227 +    // see Class entries here with the old verifier. Similarly the old
   1.228 +    // verifier does not like Class entries in the input constant pool.
   1.229 +    // The split-verifier is implemented in the VM so it can optionally
   1.230 +    // and directly resolve constant pool entries to load classes. The
   1.231 +    // split-verifier can accept either Class entries or UnresolvedClass
   1.232 +    // entries in the input constant pool. We revert the appended copy
   1.233 +    // back to UnresolvedClass so that either verifier will be happy
   1.234 +    // with the constant pool entry.
   1.235 +    case JVM_CONSTANT_Class:
   1.236 +    {
   1.237 +      // revert the copy to JVM_CONSTANT_UnresolvedClass
   1.238 +      (*merge_cp_p)->unresolved_klass_at_put(*merge_cp_length_p,
   1.239 +        scratch_cp->klass_name_at(scratch_i));
   1.240 +
   1.241 +      if (scratch_i != *merge_cp_length_p) {
   1.242 +        // The new entry in *merge_cp_p is at a different index than
   1.243 +        // the new entry in scratch_cp so we need to map the index values.
   1.244 +        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
   1.245 +      }
   1.246 +      (*merge_cp_length_p)++;
   1.247 +    } break;
   1.248 +
   1.249 +    // these are direct CP entries so they can be directly appended,
   1.250 +    // but double and long take two constant pool entries
   1.251 +    case JVM_CONSTANT_Double:  // fall through
   1.252 +    case JVM_CONSTANT_Long:
   1.253 +    {
   1.254 +      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
   1.255 +        THREAD);
   1.256 +
   1.257 +      if (scratch_i != *merge_cp_length_p) {
   1.258 +        // The new entry in *merge_cp_p is at a different index than
   1.259 +        // the new entry in scratch_cp so we need to map the index values.
   1.260 +        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
   1.261 +      }
   1.262 +      (*merge_cp_length_p) += 2;
   1.263 +    } break;
   1.264 +
   1.265 +    // these are direct CP entries so they can be directly appended
   1.266 +    case JVM_CONSTANT_Float:   // fall through
   1.267 +    case JVM_CONSTANT_Integer: // fall through
   1.268 +    case JVM_CONSTANT_Utf8:    // fall through
   1.269 +
    1.270 +    // This was an indirect CP entry, but it has been changed into
    1.271 +    // a Symbol* so this entry can be directly appended.
   1.272 +    case JVM_CONSTANT_String:      // fall through
   1.273 +
   1.274 +    // These were indirect CP entries, but they have been changed into
   1.275 +    // Symbol*s so these entries can be directly appended.
   1.276 +    case JVM_CONSTANT_UnresolvedClass:  // fall through
   1.277 +    {
   1.278 +      ConstantPool::copy_entry_to(scratch_cp, scratch_i, *merge_cp_p, *merge_cp_length_p,
   1.279 +        THREAD);
   1.280 +
   1.281 +      if (scratch_i != *merge_cp_length_p) {
   1.282 +        // The new entry in *merge_cp_p is at a different index than
   1.283 +        // the new entry in scratch_cp so we need to map the index values.
   1.284 +        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
   1.285 +      }
   1.286 +      (*merge_cp_length_p)++;
   1.287 +    } break;
   1.288 +
   1.289 +    // this is an indirect CP entry so it needs special handling
   1.290 +    case JVM_CONSTANT_NameAndType:
   1.291 +    {
   1.292 +      int name_ref_i = scratch_cp->name_ref_index_at(scratch_i);
   1.293 +      int new_name_ref_i = find_or_append_indirect_entry(scratch_cp, name_ref_i, merge_cp_p,
   1.294 +                                                         merge_cp_length_p, THREAD);
   1.295 +
   1.296 +      int signature_ref_i = scratch_cp->signature_ref_index_at(scratch_i);
   1.297 +      int new_signature_ref_i = find_or_append_indirect_entry(scratch_cp, signature_ref_i,
   1.298 +                                                              merge_cp_p, merge_cp_length_p,
   1.299 +                                                              THREAD);
   1.300 +
    1.301 +      // If the referenced entries already exist in *merge_cp_p at the
    1.302 +      // same indices, then new_name_ref_i and new_signature_ref_i are
    1.303 +      // unchanged. In that case, all we are appending is the current entry.
   1.304 +      if (new_name_ref_i != name_ref_i) {
   1.305 +        RC_TRACE(0x00080000,
   1.306 +          ("NameAndType entry@%d name_ref_index change: %d to %d",
   1.307 +          *merge_cp_length_p, name_ref_i, new_name_ref_i));
   1.308 +      }
   1.309 +      if (new_signature_ref_i != signature_ref_i) {
   1.310 +        RC_TRACE(0x00080000,
   1.311 +          ("NameAndType entry@%d signature_ref_index change: %d to %d",
   1.312 +          *merge_cp_length_p, signature_ref_i, new_signature_ref_i));
   1.313 +      }
   1.314 +
   1.315 +      (*merge_cp_p)->name_and_type_at_put(*merge_cp_length_p,
   1.316 +        new_name_ref_i, new_signature_ref_i);
   1.317 +      if (scratch_i != *merge_cp_length_p) {
   1.318 +        // The new entry in *merge_cp_p is at a different index than
   1.319 +        // the new entry in scratch_cp so we need to map the index values.
   1.320 +        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
   1.321 +      }
   1.322 +      (*merge_cp_length_p)++;
   1.323 +    } break;
   1.324 +
   1.325 +    // this is a double-indirect CP entry so it needs special handling
   1.326 +    case JVM_CONSTANT_Fieldref:           // fall through
   1.327 +    case JVM_CONSTANT_InterfaceMethodref: // fall through
   1.328 +    case JVM_CONSTANT_Methodref:
   1.329 +    {
   1.330 +      int klass_ref_i = scratch_cp->uncached_klass_ref_index_at(scratch_i);
   1.331 +      int new_klass_ref_i = find_or_append_indirect_entry(scratch_cp, klass_ref_i,
   1.332 +                                                          merge_cp_p, merge_cp_length_p, THREAD);
   1.333 +
   1.334 +      int name_and_type_ref_i = scratch_cp->uncached_name_and_type_ref_index_at(scratch_i);
   1.335 +      int new_name_and_type_ref_i = find_or_append_indirect_entry(scratch_cp, name_and_type_ref_i,
   1.336 +                                                          merge_cp_p, merge_cp_length_p, THREAD);
   1.337 +
   1.338 +      const char *entry_name;
   1.339 +      switch (scratch_cp->tag_at(scratch_i).value()) {
   1.340 +      case JVM_CONSTANT_Fieldref:
   1.341 +        entry_name = "Fieldref";
   1.342 +        (*merge_cp_p)->field_at_put(*merge_cp_length_p, new_klass_ref_i,
   1.343 +          new_name_and_type_ref_i);
   1.344 +        break;
   1.345 +      case JVM_CONSTANT_InterfaceMethodref:
   1.346 +        entry_name = "IFMethodref";
   1.347 +        (*merge_cp_p)->interface_method_at_put(*merge_cp_length_p,
   1.348 +          new_klass_ref_i, new_name_and_type_ref_i);
   1.349 +        break;
   1.350 +      case JVM_CONSTANT_Methodref:
   1.351 +        entry_name = "Methodref";
   1.352 +        (*merge_cp_p)->method_at_put(*merge_cp_length_p, new_klass_ref_i,
   1.353 +          new_name_and_type_ref_i);
   1.354 +        break;
   1.355 +      default:
   1.356 +        guarantee(false, "bad switch");
   1.357 +        break;
   1.358 +      }
   1.359 +
   1.360 +      if (klass_ref_i != new_klass_ref_i) {
   1.361 +        RC_TRACE(0x00080000, ("%s entry@%d class_index changed: %d to %d",
   1.362 +          entry_name, *merge_cp_length_p, klass_ref_i, new_klass_ref_i));
   1.363 +      }
   1.364 +      if (name_and_type_ref_i != new_name_and_type_ref_i) {
   1.365 +        RC_TRACE(0x00080000,
   1.366 +          ("%s entry@%d name_and_type_index changed: %d to %d",
   1.367 +          entry_name, *merge_cp_length_p, name_and_type_ref_i,
   1.368 +          new_name_and_type_ref_i));
   1.369 +      }
   1.370 +
   1.371 +      if (scratch_i != *merge_cp_length_p) {
   1.372 +        // The new entry in *merge_cp_p is at a different index than
   1.373 +        // the new entry in scratch_cp so we need to map the index values.
   1.374 +        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
   1.375 +      }
   1.376 +      (*merge_cp_length_p)++;
   1.377 +    } break;
   1.378 +
   1.379 +    // this is an indirect CP entry so it needs special handling
   1.380 +    case JVM_CONSTANT_MethodType:
   1.381 +    {
   1.382 +      int ref_i = scratch_cp->method_type_index_at(scratch_i);
   1.383 +      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
   1.384 +                                                    merge_cp_length_p, THREAD);
   1.385 +      if (new_ref_i != ref_i) {
   1.386 +        RC_TRACE(0x00080000,
   1.387 +                 ("MethodType entry@%d ref_index change: %d to %d",
   1.388 +                  *merge_cp_length_p, ref_i, new_ref_i));
   1.389 +      }
   1.390 +      (*merge_cp_p)->method_type_index_at_put(*merge_cp_length_p, new_ref_i);
   1.391 +      if (scratch_i != *merge_cp_length_p) {
   1.392 +        // The new entry in *merge_cp_p is at a different index than
   1.393 +        // the new entry in scratch_cp so we need to map the index values.
   1.394 +        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
   1.395 +      }
   1.396 +      (*merge_cp_length_p)++;
   1.397 +    } break;
   1.398 +
   1.399 +    // this is an indirect CP entry so it needs special handling
   1.400 +    case JVM_CONSTANT_MethodHandle:
   1.401 +    {
   1.402 +      int ref_kind = scratch_cp->method_handle_ref_kind_at(scratch_i);
   1.403 +      int ref_i = scratch_cp->method_handle_index_at(scratch_i);
   1.404 +      int new_ref_i = find_or_append_indirect_entry(scratch_cp, ref_i, merge_cp_p,
   1.405 +                                                    merge_cp_length_p, THREAD);
   1.406 +      if (new_ref_i != ref_i) {
   1.407 +        RC_TRACE(0x00080000,
   1.408 +                 ("MethodHandle entry@%d ref_index change: %d to %d",
   1.409 +                  *merge_cp_length_p, ref_i, new_ref_i));
   1.410 +      }
   1.411 +      (*merge_cp_p)->method_handle_index_at_put(*merge_cp_length_p, ref_kind, new_ref_i);
   1.412 +      if (scratch_i != *merge_cp_length_p) {
   1.413 +        // The new entry in *merge_cp_p is at a different index than
   1.414 +        // the new entry in scratch_cp so we need to map the index values.
   1.415 +        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
   1.416 +      }
   1.417 +      (*merge_cp_length_p)++;
   1.418 +    } break;
   1.419 +
   1.420 +    // this is an indirect CP entry so it needs special handling
   1.421 +    case JVM_CONSTANT_InvokeDynamic:
   1.422 +    {
   1.423 +      // Index of the bootstrap specifier in the operands array
   1.424 +      int old_bs_i = scratch_cp->invoke_dynamic_bootstrap_specifier_index(scratch_i);
   1.425 +      int new_bs_i = find_or_append_operand(scratch_cp, old_bs_i, merge_cp_p,
   1.426 +                                            merge_cp_length_p, THREAD);
   1.427 +      // The bootstrap method NameAndType_info index
   1.428 +      int old_ref_i = scratch_cp->invoke_dynamic_name_and_type_ref_index_at(scratch_i);
   1.429 +      int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
   1.430 +                                                    merge_cp_length_p, THREAD);
   1.431 +      if (new_bs_i != old_bs_i) {
   1.432 +        RC_TRACE(0x00080000,
   1.433 +                 ("InvokeDynamic entry@%d bootstrap_method_attr_index change: %d to %d",
   1.434 +                  *merge_cp_length_p, old_bs_i, new_bs_i));
   1.435 +      }
   1.436 +      if (new_ref_i != old_ref_i) {
   1.437 +        RC_TRACE(0x00080000,
   1.438 +                 ("InvokeDynamic entry@%d name_and_type_index change: %d to %d",
   1.439 +                  *merge_cp_length_p, old_ref_i, new_ref_i));
   1.440 +      }
   1.441 +
   1.442 +      (*merge_cp_p)->invoke_dynamic_at_put(*merge_cp_length_p, new_bs_i, new_ref_i);
   1.443 +      if (scratch_i != *merge_cp_length_p) {
   1.444 +        // The new entry in *merge_cp_p is at a different index than
   1.445 +        // the new entry in scratch_cp so we need to map the index values.
   1.446 +        map_index(scratch_cp, scratch_i, *merge_cp_length_p);
   1.447 +      }
   1.448 +      (*merge_cp_length_p)++;
   1.449 +    } break;
   1.450 +
   1.451 +    // At this stage, Class or UnresolvedClass could be here, but not
   1.452 +    // ClassIndex
   1.453 +    case JVM_CONSTANT_ClassIndex: // fall through
   1.454 +
   1.455 +    // Invalid is used as the tag for the second constant pool entry
   1.456 +    // occupied by JVM_CONSTANT_Double or JVM_CONSTANT_Long. It should
   1.457 +    // not be seen by itself.
   1.458 +    case JVM_CONSTANT_Invalid: // fall through
   1.459 +
   1.460 +    // At this stage, String could be here, but not StringIndex
   1.461 +    case JVM_CONSTANT_StringIndex: // fall through
   1.462 +
   1.463 +    // At this stage JVM_CONSTANT_UnresolvedClassInError should not be
   1.464 +    // here
   1.465 +    case JVM_CONSTANT_UnresolvedClassInError: // fall through
   1.466 +
   1.467 +    default:
   1.468 +    {
   1.469 +      // leave a breadcrumb
   1.470 +      jbyte bad_value = scratch_cp->tag_at(scratch_i).value();
   1.471 +      ShouldNotReachHere();
   1.472 +    } break;
   1.473 +  } // end switch tag value
   1.474 +} // end append_entry()
   1.475 +
   1.476 +
   1.477 +int VM_RedefineClasses::find_or_append_indirect_entry(constantPoolHandle scratch_cp,
   1.478 +      int ref_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {
   1.479 +
   1.480 +  int new_ref_i = ref_i;
   1.481 +  bool match = (ref_i < *merge_cp_length_p) &&
   1.482 +               scratch_cp->compare_entry_to(ref_i, *merge_cp_p, ref_i, THREAD);
   1.483 +
   1.484 +  if (!match) {
   1.485 +    // forward reference in *merge_cp_p or not a direct match
   1.486 +    int found_i = scratch_cp->find_matching_entry(ref_i, *merge_cp_p, THREAD);
   1.487 +    if (found_i != 0) {
   1.488 +      guarantee(found_i != ref_i, "compare_entry_to() and find_matching_entry() do not agree");
   1.489 +      // Found a matching entry somewhere else in *merge_cp_p so just need a mapping entry.
   1.490 +      new_ref_i = found_i;
   1.491 +      map_index(scratch_cp, ref_i, found_i);
   1.492 +    } else {
   1.493 +      // no match found so we have to append this entry to *merge_cp_p
   1.494 +      append_entry(scratch_cp, ref_i, merge_cp_p, merge_cp_length_p, THREAD);
    1.495 +      // append_entry() appends the requested entry last, after any
    1.496 +      // entries it references, so the requested entry's new index is
    1.497 +      // the last index in *merge_cp_p.
   1.498 +      new_ref_i = *merge_cp_length_p - 1;
   1.499 +    }
   1.500 +  }
   1.501 +
   1.502 +  return new_ref_i;
   1.503 +} // end find_or_append_indirect_entry()
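          +
          +// An illustrative walk-through of find_or_append_indirect_entry():
          +// if scratch_cp entry 7 already matches *merge_cp_p entry 7, nothing
          +// is done and 7 is returned. If it instead matches *merge_cp_p entry
          +// 42, only a 7 -> 42 mapping is recorded. Otherwise the entry is
          +// appended at the current end of *merge_cp_p and that new index is
          +// returned.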
   1.504 +
   1.505 +
    1.506 +// Append, to the merge_cp operands array, a bootstrap specifier semantically
    1.507 +// equal to the scratch_cp bootstrap specifier identified by the old_bs_i
    1.508 +// index. Recursively appends any merge_cp entries the new specifier references.
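          +// In the operands array, each specifier is stored as its bootstrap
          +// method ref index, its argument count, and then its argument
          +// indexes, located via a leading offset table.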
   1.509 +void VM_RedefineClasses::append_operand(constantPoolHandle scratch_cp, int old_bs_i,
   1.510 +       constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {
   1.511 +
   1.512 +  int old_ref_i = scratch_cp->operand_bootstrap_method_ref_index_at(old_bs_i);
   1.513 +  int new_ref_i = find_or_append_indirect_entry(scratch_cp, old_ref_i, merge_cp_p,
   1.514 +                                                merge_cp_length_p, THREAD);
   1.515 +  if (new_ref_i != old_ref_i) {
   1.516 +    RC_TRACE(0x00080000,
   1.517 +             ("operands entry@%d bootstrap method ref_index change: %d to %d",
   1.518 +              _operands_cur_length, old_ref_i, new_ref_i));
   1.519 +  }
   1.520 +
   1.521 +  Array<u2>* merge_ops = (*merge_cp_p)->operands();
   1.522 +  int new_bs_i = _operands_cur_length;
    1.523 +  // _operands_cur_length is 0 while the merge_cp operands array is still empty.
   1.524 +  // However, the operand_offset_at(0) was set in the extend_operands() call.
   1.525 +  int new_base = (new_bs_i == 0) ? (*merge_cp_p)->operand_offset_at(0)
   1.526 +                                 : (*merge_cp_p)->operand_next_offset_at(new_bs_i - 1);
   1.527 +  int argc     = scratch_cp->operand_argument_count_at(old_bs_i);
   1.528 +
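          +  // Record where the new specifier starts, then write its bootstrap
          +  // method ref index and argument count; the argument indexes are
          +  // appended in the loop below.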
   1.529 +  ConstantPool::operand_offset_at_put(merge_ops, _operands_cur_length, new_base);
   1.530 +  merge_ops->at_put(new_base++, new_ref_i);
   1.531 +  merge_ops->at_put(new_base++, argc);
   1.532 +
   1.533 +  for (int i = 0; i < argc; i++) {
   1.534 +    int old_arg_ref_i = scratch_cp->operand_argument_index_at(old_bs_i, i);
   1.535 +    int new_arg_ref_i = find_or_append_indirect_entry(scratch_cp, old_arg_ref_i, merge_cp_p,
   1.536 +                                                      merge_cp_length_p, THREAD);
   1.537 +    merge_ops->at_put(new_base++, new_arg_ref_i);
   1.538 +    if (new_arg_ref_i != old_arg_ref_i) {
   1.539 +      RC_TRACE(0x00080000,
   1.540 +               ("operands entry@%d bootstrap method argument ref_index change: %d to %d",
   1.541 +                _operands_cur_length, old_arg_ref_i, new_arg_ref_i));
   1.542 +    }
   1.543 +  }
   1.544 +  if (old_bs_i != _operands_cur_length) {
   1.545 +    // The bootstrap specifier in *merge_cp_p is at a different index than
   1.546 +    // that in scratch_cp so we need to map the index values.
   1.547 +    map_operand_index(old_bs_i, new_bs_i);
   1.548 +  }
   1.549 +  _operands_cur_length++;
   1.550 +} // end append_operand()
   1.551 +
   1.552 +
   1.553 +int VM_RedefineClasses::find_or_append_operand(constantPoolHandle scratch_cp,
   1.554 +      int old_bs_i, constantPoolHandle *merge_cp_p, int *merge_cp_length_p, TRAPS) {
   1.555 +
   1.556 +  int new_bs_i = old_bs_i; // bootstrap specifier index
   1.557 +  bool match = (old_bs_i < _operands_cur_length) &&
   1.558 +               scratch_cp->compare_operand_to(old_bs_i, *merge_cp_p, old_bs_i, THREAD);
   1.559 +
   1.560 +  if (!match) {
   1.561 +    // forward reference in *merge_cp_p or not a direct match
   1.562 +    int found_i = scratch_cp->find_matching_operand(old_bs_i, *merge_cp_p,
   1.563 +                                                    _operands_cur_length, THREAD);
   1.564 +    if (found_i != -1) {
   1.565 +      guarantee(found_i != old_bs_i, "compare_operand_to() and find_matching_operand() disagree");
   1.566 +      // found a matching operand somewhere else in *merge_cp_p so just need a mapping
   1.567 +      new_bs_i = found_i;
   1.568 +      map_operand_index(old_bs_i, found_i);
   1.569 +    } else {
   1.570 +      // no match found so we have to append this bootstrap specifier to *merge_cp_p
   1.571 +      append_operand(scratch_cp, old_bs_i, merge_cp_p, merge_cp_length_p, THREAD);
   1.572 +      new_bs_i = _operands_cur_length - 1;
   1.573 +    }
   1.574 +  }
   1.575 +  return new_bs_i;
   1.576 +} // end find_or_append_operand()
   1.577 +
   1.578 +
   1.579 +void VM_RedefineClasses::finalize_operands_merge(constantPoolHandle merge_cp, TRAPS) {
   1.580 +  if (merge_cp->operands() == NULL) {
   1.581 +    return;
   1.582 +  }
   1.583 +  // Shrink the merge_cp operands
   1.584 +  merge_cp->shrink_operands(_operands_cur_length, CHECK);
   1.585 +
   1.586 +  if (RC_TRACE_ENABLED(0x00040000)) {
   1.587 +    // don't want to loop unless we are tracing
   1.588 +    int count = 0;
   1.589 +    for (int i = 1; i < _operands_index_map_p->length(); i++) {
   1.590 +      int value = _operands_index_map_p->at(i);
   1.591 +      if (value != -1) {
   1.592 +        RC_TRACE_WITH_THREAD(0x00040000, THREAD,
   1.593 +          ("operands_index_map[%d]: old=%d new=%d", count, i, value));
   1.594 +        count++;
   1.595 +      }
   1.596 +    }
   1.597 +  }
   1.598 +  // Clean-up
   1.599 +  _operands_index_map_p = NULL;
   1.600 +  _operands_cur_length = 0;
   1.601 +  _operands_index_map_count = 0;
   1.602 +} // end finalize_operands_merge()
   1.603 +
   1.604 +
   1.605 +jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
   1.606 +             instanceKlassHandle the_class,
   1.607 +             instanceKlassHandle scratch_class) {
   1.608 +  int i;
   1.609 +
   1.610 +  // Check superclasses, or rather their names, since superclasses themselves can be
   1.611 +  // requested to replace.
   1.612 +  // Check for NULL superclass first since this might be java.lang.Object
   1.613 +  if (the_class->super() != scratch_class->super() &&
   1.614 +      (the_class->super() == NULL || scratch_class->super() == NULL ||
   1.615 +       the_class->super()->name() !=
   1.616 +       scratch_class->super()->name())) {
   1.617 +    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
   1.618 +  }
   1.619 +
   1.620 +  // Check if the number, names and order of directly implemented interfaces are the same.
    1.621 +  // In principle it should suffice to check that the sets of names of directly
    1.622 +  // implemented interfaces are the same, i.e. the order of declaration (which
    1.623 +  // changes in the .class file when changed in the .java file) should not matter.
    1.624 +  // However, comparing sets is technically harder and, more importantly, it is
    1.625 +  // not certain that the VM does not rely on the interface order somewhere on
    1.626 +  // the implementation level.
   1.627 +  Array<Klass*>* k_interfaces = the_class->local_interfaces();
   1.628 +  Array<Klass*>* k_new_interfaces = scratch_class->local_interfaces();
   1.629 +  int n_intfs = k_interfaces->length();
   1.630 +  if (n_intfs != k_new_interfaces->length()) {
   1.631 +    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
   1.632 +  }
   1.633 +  for (i = 0; i < n_intfs; i++) {
   1.634 +    if (k_interfaces->at(i)->name() !=
   1.635 +        k_new_interfaces->at(i)->name()) {
   1.636 +      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
   1.637 +    }
   1.638 +  }
   1.639 +
   1.640 +  // Check whether class is in the error init state.
   1.641 +  if (the_class->is_in_error_state()) {
   1.642 +    // TBD #5057930: special error code is needed in 1.6
   1.643 +    return JVMTI_ERROR_INVALID_CLASS;
   1.644 +  }
   1.645 +
   1.646 +  // Check whether class modifiers are the same.
   1.647 +  jushort old_flags = (jushort) the_class->access_flags().get_flags();
   1.648 +  jushort new_flags = (jushort) scratch_class->access_flags().get_flags();
   1.649 +  if (old_flags != new_flags) {
   1.650 +    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_CLASS_MODIFIERS_CHANGED;
   1.651 +  }
   1.652 +
   1.653 +  // Check if the number, names, types and order of fields declared in these classes
   1.654 +  // are the same.
   1.655 +  JavaFieldStream old_fs(the_class);
   1.656 +  JavaFieldStream new_fs(scratch_class);
   1.657 +  for (; !old_fs.done() && !new_fs.done(); old_fs.next(), new_fs.next()) {
   1.658 +    // access
   1.659 +    old_flags = old_fs.access_flags().as_short();
   1.660 +    new_flags = new_fs.access_flags().as_short();
   1.661 +    if ((old_flags ^ new_flags) & JVM_RECOGNIZED_FIELD_MODIFIERS) {
   1.662 +      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
   1.663 +    }
   1.664 +    // offset
   1.665 +    if (old_fs.offset() != new_fs.offset()) {
   1.666 +      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
   1.667 +    }
   1.668 +    // name and signature
   1.669 +    Symbol* name_sym1 = the_class->constants()->symbol_at(old_fs.name_index());
   1.670 +    Symbol* sig_sym1 = the_class->constants()->symbol_at(old_fs.signature_index());
   1.671 +    Symbol* name_sym2 = scratch_class->constants()->symbol_at(new_fs.name_index());
   1.672 +    Symbol* sig_sym2 = scratch_class->constants()->symbol_at(new_fs.signature_index());
   1.673 +    if (name_sym1 != name_sym2 || sig_sym1 != sig_sym2) {
   1.674 +      return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
   1.675 +    }
   1.676 +  }
   1.677 +
   1.678 +  // If both streams aren't done then we have a differing number of
   1.679 +  // fields.
   1.680 +  if (!old_fs.done() || !new_fs.done()) {
   1.681 +    return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_SCHEMA_CHANGED;
   1.682 +  }
   1.683 +
   1.684 +  // Do a parallel walk through the old and new methods. Detect
   1.685 +  // cases where they match (exist in both), have been added in
   1.686 +  // the new methods, or have been deleted (exist only in the
   1.687 +  // old methods).  The class file parser places methods in order
   1.688 +  // by method name, but does not order overloaded methods by
   1.689 +  // signature.  In order to determine what fate befell the methods,
   1.690 +  // this code places the overloaded new methods that have matching
   1.691 +  // old methods in the same order as the old methods and places
   1.692 +  // new overloaded methods at the end of overloaded methods of
   1.693 +  // that name. The code for this order normalization is adapted
   1.694 +  // from the algorithm used in InstanceKlass::find_method().
   1.695 +  // Since we are swapping out of order entries as we find them,
   1.696 +  // we only have to search forward through the overloaded methods.
   1.697 +  // Methods which are added and have the same name as an existing
   1.698 +  // method (but different signature) will be put at the end of
   1.699 +  // the methods with that name, and the name mismatch code will
   1.700 +  // handle them.
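          +  //
          +  // For example (illustrative): if the old methods are
          +  // {foo(I)V, foo(J)V} and the new methods arrive as
          +  // {foo(J)V, foo(I)V}, the forward search swaps the two new
          +  // entries so matching signatures line up pairwise; a brand-new
          +  // foo(D)V overload would end up after the matched foo methods
          +  // and be classified as added.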
   1.701 +  Array<Method*>* k_old_methods(the_class->methods());
   1.702 +  Array<Method*>* k_new_methods(scratch_class->methods());
   1.703 +  int n_old_methods = k_old_methods->length();
   1.704 +  int n_new_methods = k_new_methods->length();
   1.705 +  Thread* thread = Thread::current();
   1.706 +
   1.707 +  int ni = 0;
   1.708 +  int oi = 0;
   1.709 +  while (true) {
   1.710 +    Method* k_old_method;
   1.711 +    Method* k_new_method;
   1.712 +    enum { matched, added, deleted, undetermined } method_was = undetermined;
   1.713 +
   1.714 +    if (oi >= n_old_methods) {
   1.715 +      if (ni >= n_new_methods) {
   1.716 +        break; // we've looked at everything, done
   1.717 +      }
   1.718 +      // New method at the end
   1.719 +      k_new_method = k_new_methods->at(ni);
   1.720 +      method_was = added;
   1.721 +    } else if (ni >= n_new_methods) {
   1.722 +      // Old method, at the end, is deleted
   1.723 +      k_old_method = k_old_methods->at(oi);
   1.724 +      method_was = deleted;
   1.725 +    } else {
   1.726 +      // There are more methods in both the old and new lists
   1.727 +      k_old_method = k_old_methods->at(oi);
   1.728 +      k_new_method = k_new_methods->at(ni);
   1.729 +      if (k_old_method->name() != k_new_method->name()) {
   1.730 +        // Methods are sorted by method name, so a mismatch means added
   1.731 +        // or deleted
   1.732 +        if (k_old_method->name()->fast_compare(k_new_method->name()) > 0) {
   1.733 +          method_was = added;
   1.734 +        } else {
   1.735 +          method_was = deleted;
   1.736 +        }
   1.737 +      } else if (k_old_method->signature() == k_new_method->signature()) {
   1.738 +        // Both the name and signature match
   1.739 +        method_was = matched;
   1.740 +      } else {
   1.741 +        // The name matches, but the signature doesn't, which means we have to
   1.742 +        // search forward through the new overloaded methods.
   1.743 +        int nj;  // outside the loop for post-loop check
   1.744 +        for (nj = ni + 1; nj < n_new_methods; nj++) {
   1.745 +          Method* m = k_new_methods->at(nj);
   1.746 +          if (k_old_method->name() != m->name()) {
   1.747 +            // reached another method name so no more overloaded methods
   1.748 +            method_was = deleted;
   1.749 +            break;
   1.750 +          }
   1.751 +          if (k_old_method->signature() == m->signature()) {
   1.752 +            // found a match so swap the methods
   1.753 +            k_new_methods->at_put(ni, m);
   1.754 +            k_new_methods->at_put(nj, k_new_method);
   1.755 +            k_new_method = m;
   1.756 +            method_was = matched;
   1.757 +            break;
   1.758 +          }
   1.759 +        }
   1.760 +
   1.761 +        if (nj >= n_new_methods) {
   1.762 +          // reached the end without a match; so method was deleted
   1.763 +          method_was = deleted;
   1.764 +        }
   1.765 +      }
   1.766 +    }
   1.767 +
   1.768 +    switch (method_was) {
   1.769 +    case matched:
   1.770 +      // methods match, be sure modifiers do too
   1.771 +      old_flags = (jushort) k_old_method->access_flags().get_flags();
   1.772 +      new_flags = (jushort) k_new_method->access_flags().get_flags();
   1.773 +      if ((old_flags ^ new_flags) & ~(JVM_ACC_NATIVE)) {
   1.774 +        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_MODIFIERS_CHANGED;
   1.775 +      }
   1.776 +      {
   1.777 +        u2 new_num = k_new_method->method_idnum();
   1.778 +        u2 old_num = k_old_method->method_idnum();
   1.779 +        if (new_num != old_num) {
   1.780 +          Method* idnum_owner = scratch_class->method_with_idnum(old_num);
   1.781 +          if (idnum_owner != NULL) {
   1.782 +            // There is already a method assigned this idnum -- switch them
   1.783 +            idnum_owner->set_method_idnum(new_num);
   1.784 +          }
   1.785 +          k_new_method->set_method_idnum(old_num);
   1.786 +          if (thread->has_pending_exception()) {
   1.787 +            return JVMTI_ERROR_OUT_OF_MEMORY;
   1.788 +          }
   1.789 +        }
   1.790 +      }
   1.791 +      RC_TRACE(0x00008000, ("Method matched: new: %s [%d] == old: %s [%d]",
   1.792 +                            k_new_method->name_and_sig_as_C_string(), ni,
   1.793 +                            k_old_method->name_and_sig_as_C_string(), oi));
   1.794 +      // advance to next pair of methods
   1.795 +      ++oi;
   1.796 +      ++ni;
   1.797 +      break;
   1.798 +    case added:
   1.799 +      // method added, see if it is OK
   1.800 +      new_flags = (jushort) k_new_method->access_flags().get_flags();
   1.801 +      if ((new_flags & JVM_ACC_PRIVATE) == 0
   1.802 +           // hack: private should be treated as final, but alas
   1.803 +          || (new_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
   1.804 +         ) {
   1.805 +        // new methods must be private
   1.806 +        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
   1.807 +      }
   1.808 +      {
   1.809 +        u2 num = the_class->next_method_idnum();
   1.810 +        if (num == ConstMethod::UNSET_IDNUM) {
   1.811 +          // cannot add any more methods
   1.812 +          return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_ADDED;
   1.813 +        }
   1.814 +        u2 new_num = k_new_method->method_idnum();
   1.815 +        Method* idnum_owner = scratch_class->method_with_idnum(num);
   1.816 +        if (idnum_owner != NULL) {
   1.817 +          // There is already a method assigned this idnum -- switch them
   1.818 +          idnum_owner->set_method_idnum(new_num);
   1.819 +        }
   1.820 +        k_new_method->set_method_idnum(num);
   1.821 +        if (thread->has_pending_exception()) {
   1.822 +          return JVMTI_ERROR_OUT_OF_MEMORY;
   1.823 +        }
   1.824 +      }
   1.825 +      RC_TRACE(0x00008000, ("Method added: new: %s [%d]",
   1.826 +                            k_new_method->name_and_sig_as_C_string(), ni));
   1.827 +      ++ni; // advance to next new method
   1.828 +      break;
   1.829 +    case deleted:
   1.830 +      // method deleted, see if it is OK
   1.831 +      old_flags = (jushort) k_old_method->access_flags().get_flags();
   1.832 +      if ((old_flags & JVM_ACC_PRIVATE) == 0
   1.833 +           // hack: private should be treated as final, but alas
   1.834 +          || (old_flags & (JVM_ACC_FINAL|JVM_ACC_STATIC)) == 0
   1.835 +         ) {
   1.836 +        // deleted methods must be private
   1.837 +        return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_METHOD_DELETED;
   1.838 +      }
   1.839 +      RC_TRACE(0x00008000, ("Method deleted: old: %s [%d]",
   1.840 +                            k_old_method->name_and_sig_as_C_string(), oi));
   1.841 +      ++oi; // advance to next old method
   1.842 +      break;
   1.843 +    default:
   1.844 +      ShouldNotReachHere();
   1.845 +    }
   1.846 +  }
   1.847 +
   1.848 +  return JVMTI_ERROR_NONE;
   1.849 +}
   1.850 +
   1.851 +
   1.852 +// Find new constant pool index value for old constant pool index value
    1.853 +// by searching the index map. Returns zero (0) if there is no mapped
   1.854 +// value for the old constant pool index.
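          +// Note that constant pool index zero is never a valid entry, which is
          +// why zero can double as the "not mapped" sentinel here.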
   1.855 +int VM_RedefineClasses::find_new_index(int old_index) {
   1.856 +  if (_index_map_count == 0) {
   1.857 +    // map is empty so nothing can be found
   1.858 +    return 0;
   1.859 +  }
   1.860 +
   1.861 +  if (old_index < 1 || old_index >= _index_map_p->length()) {
   1.862 +    // The old_index is out of range so it is not mapped. This should
   1.863 +    // not happen in regular constant pool merging use, but it can
   1.864 +    // happen if a corrupt annotation is processed.
   1.865 +    return 0;
   1.866 +  }
   1.867 +
   1.868 +  int value = _index_map_p->at(old_index);
   1.869 +  if (value == -1) {
   1.870 +    // the old_index is not mapped
   1.871 +    return 0;
   1.872 +  }
   1.873 +
   1.874 +  return value;
   1.875 +} // end find_new_index()
   1.876 +
   1.877 +
   1.878 +// Find new bootstrap specifier index value for old bootstrap specifier index
    1.879 +// value by searching the index map. Returns the unused index (-1) if there is
   1.880 +// no mapped value for the old bootstrap specifier index.
   1.881 +int VM_RedefineClasses::find_new_operand_index(int old_index) {
   1.882 +  if (_operands_index_map_count == 0) {
   1.883 +    // map is empty so nothing can be found
   1.884 +    return -1;
   1.885 +  }
   1.886 +
   1.887 +  if (old_index == -1 || old_index >= _operands_index_map_p->length()) {
   1.888 +    // The old_index is out of range so it is not mapped.
   1.889 +    // This should not happen in regular constant pool merging use.
   1.890 +    return -1;
   1.891 +  }
   1.892 +
   1.893 +  int value = _operands_index_map_p->at(old_index);
   1.894 +  if (value == -1) {
   1.895 +    // the old_index is not mapped
   1.896 +    return -1;
   1.897 +  }
   1.898 +
   1.899 +  return value;
   1.900 +} // end find_new_operand_index()
   1.901 +
   1.902 +
   1.903 +// Returns true if the current mismatch is due to a resolved/unresolved
   1.904 +// class pair. Otherwise, returns false.
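          +// For example, a resolved JVM_CONSTANT_Class for java/lang/String in
          +// one pool and a JVM_CONSTANT_UnresolvedClass naming java/lang/String
          +// in the other denote the same class, so the pair is not treated as a
          +// real mismatch.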
   1.905 +bool VM_RedefineClasses::is_unresolved_class_mismatch(constantPoolHandle cp1,
   1.906 +       int index1, constantPoolHandle cp2, int index2) {
   1.907 +
   1.908 +  jbyte t1 = cp1->tag_at(index1).value();
   1.909 +  if (t1 != JVM_CONSTANT_Class && t1 != JVM_CONSTANT_UnresolvedClass) {
   1.910 +    return false;  // wrong entry type; not our special case
   1.911 +  }
   1.912 +
   1.913 +  jbyte t2 = cp2->tag_at(index2).value();
   1.914 +  if (t2 != JVM_CONSTANT_Class && t2 != JVM_CONSTANT_UnresolvedClass) {
   1.915 +    return false;  // wrong entry type; not our special case
   1.916 +  }
   1.917 +
   1.918 +  if (t1 == t2) {
   1.919 +    return false;  // not a mismatch; not our special case
   1.920 +  }
   1.921 +
   1.922 +  char *s1 = cp1->klass_name_at(index1)->as_C_string();
   1.923 +  char *s2 = cp2->klass_name_at(index2)->as_C_string();
   1.924 +  if (strcmp(s1, s2) != 0) {
   1.925 +    return false;  // strings don't match; not our special case
   1.926 +  }
   1.927 +
   1.928 +  return true;  // made it through the gauntlet; this is our special case
   1.929 +} // end is_unresolved_class_mismatch()
   1.930 +
   1.931 +
   1.932 +jvmtiError VM_RedefineClasses::load_new_class_versions(TRAPS) {
   1.933 +
   1.934 +  // For consistency allocate memory using os::malloc wrapper.
   1.935 +  _scratch_classes = (Klass**)
   1.936 +    os::malloc(sizeof(Klass*) * _class_count, mtClass);
   1.937 +  if (_scratch_classes == NULL) {
   1.938 +    return JVMTI_ERROR_OUT_OF_MEMORY;
   1.939 +  }
   1.940 +  // Zero initialize the _scratch_classes array.
   1.941 +  for (int i = 0; i < _class_count; i++) {
   1.942 +    _scratch_classes[i] = NULL;
   1.943 +  }
   1.944 +
   1.945 +  ResourceMark rm(THREAD);
   1.946 +
   1.947 +  JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
    1.948 +  // state can only be NULL if the current thread is exiting, which
    1.949 +  // should not happen since we are in the middle of a RedefineClasses operation
   1.950 +  guarantee(state != NULL, "exiting thread calling load_new_class_versions");
   1.951 +  for (int i = 0; i < _class_count; i++) {
   1.952 +    // Create HandleMark so that any handles created while loading new class
   1.953 +    // versions are deleted. Constant pools are deallocated while merging
   1.954 +    // constant pools
   1.955 +    HandleMark hm(THREAD);
   1.956 +
   1.957 +    oop mirror = JNIHandles::resolve_non_null(_class_defs[i].klass);
   1.958 +    // classes for primitives cannot be redefined
   1.959 +    if (!is_modifiable_class(mirror)) {
   1.960 +      return JVMTI_ERROR_UNMODIFIABLE_CLASS;
   1.961 +    }
   1.962 +    Klass* the_class_oop = java_lang_Class::as_Klass(mirror);
   1.963 +    instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);
   1.964 +    Symbol*  the_class_sym = the_class->name();
   1.965 +
   1.966 +    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
   1.967 +    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
   1.968 +      ("loading name=%s kind=%d (avail_mem=" UINT64_FORMAT "K)",
   1.969 +      the_class->external_name(), _class_load_kind,
   1.970 +      os::available_memory() >> 10));
   1.971 +
   1.972 +    ClassFileStream st((u1*) _class_defs[i].class_bytes,
   1.973 +      _class_defs[i].class_byte_count, (char *)"__VM_RedefineClasses__");
   1.974 +
   1.975 +    // Parse the stream.
   1.976 +    Handle the_class_loader(THREAD, the_class->class_loader());
   1.977 +    Handle protection_domain(THREAD, the_class->protection_domain());
   1.978 +    // Set redefined class handle in JvmtiThreadState class.
   1.979 +    // This redefined class is sent to agent event handler for class file
   1.980 +    // load hook event.
   1.981 +    state->set_class_being_redefined(&the_class, _class_load_kind);
   1.982 +
   1.983 +    Klass* k = SystemDictionary::parse_stream(the_class_sym,
   1.984 +                                                the_class_loader,
   1.985 +                                                protection_domain,
   1.986 +                                                &st,
   1.987 +                                                THREAD);
   1.988 +    // Clear class_being_redefined just to be sure.
   1.989 +    state->clear_class_being_redefined();
   1.990 +
    1.991 +    // TODO: if this is a retransform and nothing has changed, we can skip it
   1.992 +
   1.993 +    instanceKlassHandle scratch_class (THREAD, k);
   1.994 +
   1.995 +    // Need to clean up allocated InstanceKlass if there's an error so assign
   1.996 +    // the result here. Caller deallocates all the scratch classes in case of
   1.997 +    // an error.
   1.998 +    _scratch_classes[i] = k;
   1.999 +
  1.1000 +    if (HAS_PENDING_EXCEPTION) {
  1.1001 +      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
  1.1002 +      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
  1.1003 +      RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("parse_stream exception: '%s'",
  1.1004 +        ex_name->as_C_string()));
  1.1005 +      CLEAR_PENDING_EXCEPTION;
  1.1006 +
  1.1007 +      if (ex_name == vmSymbols::java_lang_UnsupportedClassVersionError()) {
  1.1008 +        return JVMTI_ERROR_UNSUPPORTED_VERSION;
  1.1009 +      } else if (ex_name == vmSymbols::java_lang_ClassFormatError()) {
  1.1010 +        return JVMTI_ERROR_INVALID_CLASS_FORMAT;
  1.1011 +      } else if (ex_name == vmSymbols::java_lang_ClassCircularityError()) {
  1.1012 +        return JVMTI_ERROR_CIRCULAR_CLASS_DEFINITION;
  1.1013 +      } else if (ex_name == vmSymbols::java_lang_NoClassDefFoundError()) {
  1.1014 +        // The message will be "XXX (wrong name: YYY)"
  1.1015 +        return JVMTI_ERROR_NAMES_DONT_MATCH;
  1.1016 +      } else if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
  1.1017 +        return JVMTI_ERROR_OUT_OF_MEMORY;
   1.1018 +      } else {  // Just in case more exceptions can be thrown...
  1.1019 +        return JVMTI_ERROR_FAILS_VERIFICATION;
  1.1020 +      }
  1.1021 +    }
  1.1022 +
  1.1023 +    // Ensure class is linked before redefine
  1.1024 +    if (!the_class->is_linked()) {
  1.1025 +      the_class->link_class(THREAD);
  1.1026 +      if (HAS_PENDING_EXCEPTION) {
  1.1027 +        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
  1.1028 +        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
  1.1029 +        RC_TRACE_WITH_THREAD(0x00000002, THREAD, ("link_class exception: '%s'",
  1.1030 +          ex_name->as_C_string()));
  1.1031 +        CLEAR_PENDING_EXCEPTION;
  1.1032 +        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
  1.1033 +          return JVMTI_ERROR_OUT_OF_MEMORY;
  1.1034 +        } else {
  1.1035 +          return JVMTI_ERROR_INTERNAL;
  1.1036 +        }
  1.1037 +      }
  1.1038 +    }
  1.1039 +
  1.1040 +    // Do the validity checks in compare_and_normalize_class_versions()
  1.1041 +    // before verifying the byte codes. By doing these checks first, we
  1.1042 +    // limit the number of functions that require redirection from
  1.1043 +    // the_class to scratch_class. In particular, we don't have to
  1.1044 +    // modify JNI GetSuperclass() and thus won't change its performance.
  1.1045 +    jvmtiError res = compare_and_normalize_class_versions(the_class,
  1.1046 +                       scratch_class);
  1.1047 +    if (res != JVMTI_ERROR_NONE) {
  1.1048 +      return res;
  1.1049 +    }
  1.1050 +
  1.1051 +    // verify what the caller passed us
  1.1052 +    {
   1.1053 +      // Bug 6214132 caused the verification to fail.
   1.1054 +      // Information about the_class and scratch_class is temporarily
   1.1055 +      // recorded into jvmtiThreadState. This data is used to redirect
   1.1056 +      // the_class to scratch_class in the JVM_* functions called by the
   1.1057 +      // verifier. Please refer to jvmtiThreadState.hpp for a detailed
   1.1058 +      // description.
  1.1059 +      RedefineVerifyMark rvm(&the_class, &scratch_class, state);
  1.1060 +      Verifier::verify(
  1.1061 +        scratch_class, Verifier::ThrowException, true, THREAD);
  1.1062 +    }
  1.1063 +
  1.1064 +    if (HAS_PENDING_EXCEPTION) {
  1.1065 +      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
  1.1066 +      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
  1.1067 +      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
  1.1068 +        ("verify_byte_codes exception: '%s'", ex_name->as_C_string()));
  1.1069 +      CLEAR_PENDING_EXCEPTION;
  1.1070 +      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
  1.1071 +        return JVMTI_ERROR_OUT_OF_MEMORY;
  1.1072 +      } else {
  1.1073 +        // tell the caller the bytecodes are bad
  1.1074 +        return JVMTI_ERROR_FAILS_VERIFICATION;
  1.1075 +      }
  1.1076 +    }
  1.1077 +
  1.1078 +    res = merge_cp_and_rewrite(the_class, scratch_class, THREAD);
  1.1079 +    if (HAS_PENDING_EXCEPTION) {
  1.1080 +      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
  1.1081 +      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
  1.1082 +      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
  1.1083 +        ("merge_cp_and_rewrite exception: '%s'", ex_name->as_C_string()));
  1.1084 +      CLEAR_PENDING_EXCEPTION;
  1.1085 +      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
  1.1086 +        return JVMTI_ERROR_OUT_OF_MEMORY;
  1.1087 +      } else {
  1.1088 +        return JVMTI_ERROR_INTERNAL;
  1.1089 +      }
  1.1090 +    }
  1.1091 +
  1.1092 +    if (VerifyMergedCPBytecodes) {
  1.1093 +      // verify what we have done during constant pool merging
  1.1094 +      {
  1.1095 +        RedefineVerifyMark rvm(&the_class, &scratch_class, state);
  1.1096 +        Verifier::verify(scratch_class, Verifier::ThrowException, true, THREAD);
  1.1097 +      }
  1.1098 +
  1.1099 +      if (HAS_PENDING_EXCEPTION) {
  1.1100 +        Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
  1.1101 +        // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
  1.1102 +        RC_TRACE_WITH_THREAD(0x00000002, THREAD,
  1.1103 +          ("verify_byte_codes post merge-CP exception: '%s'",
  1.1104 +          ex_name->as_C_string()));
  1.1105 +        CLEAR_PENDING_EXCEPTION;
  1.1106 +        if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
  1.1107 +          return JVMTI_ERROR_OUT_OF_MEMORY;
  1.1108 +        } else {
  1.1109 +          // tell the caller that constant pool merging screwed up
  1.1110 +          return JVMTI_ERROR_INTERNAL;
  1.1111 +        }
  1.1112 +      }
  1.1113 +    }
  1.1114 +
  1.1115 +    Rewriter::rewrite(scratch_class, THREAD);
  1.1116 +    if (!HAS_PENDING_EXCEPTION) {
  1.1117 +      scratch_class->link_methods(THREAD);
  1.1118 +    }
  1.1119 +    if (HAS_PENDING_EXCEPTION) {
  1.1120 +      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
  1.1121 +      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
  1.1122 +      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
  1.1123 +        ("Rewriter::rewrite or link_methods exception: '%s'", ex_name->as_C_string()));
  1.1124 +      CLEAR_PENDING_EXCEPTION;
  1.1125 +      if (ex_name == vmSymbols::java_lang_OutOfMemoryError()) {
  1.1126 +        return JVMTI_ERROR_OUT_OF_MEMORY;
  1.1127 +      } else {
  1.1128 +        return JVMTI_ERROR_INTERNAL;
  1.1129 +      }
  1.1130 +    }
  1.1131 +
  1.1132 +    // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
  1.1133 +    RC_TRACE_WITH_THREAD(0x00000001, THREAD,
  1.1134 +      ("loaded name=%s (avail_mem=" UINT64_FORMAT "K)",
  1.1135 +      the_class->external_name(), os::available_memory() >> 10));
  1.1136 +  }
  1.1137 +
  1.1138 +  return JVMTI_ERROR_NONE;
  1.1139 +}
  1.1140 +
  1.1141 +
  1.1142 +// Map old_index to new_index as needed. scratch_cp is only needed
  1.1143 +// for RC_TRACE() calls.
  1.1144 +void VM_RedefineClasses::map_index(constantPoolHandle scratch_cp,
  1.1145 +       int old_index, int new_index) {
  1.1146 +  if (find_new_index(old_index) != 0) {
  1.1147 +    // old_index is already mapped
  1.1148 +    return;
  1.1149 +  }
  1.1150 +
  1.1151 +  if (old_index == new_index) {
  1.1152 +    // no mapping is needed
  1.1153 +    return;
  1.1154 +  }
  1.1155 +
  1.1156 +  _index_map_p->at_put(old_index, new_index);
  1.1157 +  _index_map_count++;
  1.1158 +
  1.1159 +  RC_TRACE(0x00040000, ("mapped tag %d at index %d to %d",
  1.1160 +    scratch_cp->tag_at(old_index).value(), old_index, new_index));
  1.1161 +} // end map_index()
  1.1162 +
  1.1163 +
  1.1164 +// Map old_index to new_index as needed.
  1.1165 +void VM_RedefineClasses::map_operand_index(int old_index, int new_index) {
  1.1166 +  if (find_new_operand_index(old_index) != -1) {
  1.1167 +    // old_index is already mapped
  1.1168 +    return;
  1.1169 +  }
  1.1170 +
  1.1171 +  if (old_index == new_index) {
  1.1172 +    // no mapping is needed
  1.1173 +    return;
  1.1174 +  }
  1.1175 +
  1.1176 +  _operands_index_map_p->at_put(old_index, new_index);
  1.1177 +  _operands_index_map_count++;
  1.1178 +
  1.1179 +  RC_TRACE(0x00040000, ("mapped bootstrap specifier at index %d to %d", old_index, new_index));
   1.1180 +} // end map_operand_index()
  1.1181 +
  1.1182 +
  1.1183 +// Merge old_cp and scratch_cp and return the results of the merge via
  1.1184 +// merge_cp_p. The number of entries in *merge_cp_p is returned via
  1.1185 +// merge_cp_length_p. The entries in old_cp occupy the same locations
  1.1186 +// in *merge_cp_p. Also creates a map of indices from entries in
  1.1187 +// scratch_cp to the corresponding entry in *merge_cp_p. Index map
  1.1188 +// entries are only created for entries in scratch_cp that occupy a
   1.1189 +// different location in *merge_cp_p.
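   +//
   +// A hypothetical example (not from this source): if old_cp holds
   +//   { 1: Utf8 "A", 2: Utf8 "B" }
   +// and scratch_cp holds
   +//   { 1: Utf8 "A", 2: Utf8 "C" },
   +// then pass 0 copies old_cp so *merge_cp_p starts as { 1: "A", 2: "B" },
   +// pass 1a finds scratch entry 1 already at the same merged index, and
   +// scratch entry 2 ("C") is appended at merged index 3, recording the
   +// index map entry 2 -> 3 via map_index().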
  1.1190 +bool VM_RedefineClasses::merge_constant_pools(constantPoolHandle old_cp,
  1.1191 +       constantPoolHandle scratch_cp, constantPoolHandle *merge_cp_p,
  1.1192 +       int *merge_cp_length_p, TRAPS) {
  1.1193 +
  1.1194 +  if (merge_cp_p == NULL) {
  1.1195 +    assert(false, "caller must provide scratch constantPool");
  1.1196 +    return false; // robustness
  1.1197 +  }
  1.1198 +  if (merge_cp_length_p == NULL) {
  1.1199 +    assert(false, "caller must provide scratch CP length");
  1.1200 +    return false; // robustness
  1.1201 +  }
   1.1202 +  // Worst case we need old_cp->length() + scratch_cp->length(),
  1.1203 +  // but the caller might be smart so make sure we have at least
  1.1204 +  // the minimum.
  1.1205 +  if ((*merge_cp_p)->length() < old_cp->length()) {
  1.1206 +    assert(false, "merge area too small");
  1.1207 +    return false; // robustness
  1.1208 +  }
  1.1209 +
  1.1210 +  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
  1.1211 +    ("old_cp_len=%d, scratch_cp_len=%d", old_cp->length(),
  1.1212 +    scratch_cp->length()));
  1.1213 +
  1.1214 +  {
  1.1215 +    // Pass 0:
  1.1216 +    // The old_cp is copied to *merge_cp_p; this means that any code
  1.1217 +    // using old_cp does not have to change. This work looks like a
  1.1218 +    // perfect fit for ConstantPool*::copy_cp_to(), but we need to
  1.1219 +    // handle one special case:
  1.1220 +    // - revert JVM_CONSTANT_Class to JVM_CONSTANT_UnresolvedClass
  1.1221 +    // This will make verification happy.
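   +    //
   +    // For example (illustrative), a JVM_CONSTANT_Class entry that was
   +    // resolved to java/lang/String is copied into *merge_cp_p as a
   +    // JVM_CONSTANT_UnresolvedClass entry holding the name Symbol
   +    // "java/lang/String", which is the form the verifier expects.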
  1.1222 +
  1.1223 +    int old_i;  // index into old_cp
  1.1224 +
  1.1225 +    // index zero (0) is not used in constantPools
  1.1226 +    for (old_i = 1; old_i < old_cp->length(); old_i++) {
  1.1227 +      // leave debugging crumb
  1.1228 +      jbyte old_tag = old_cp->tag_at(old_i).value();
  1.1229 +      switch (old_tag) {
  1.1230 +      case JVM_CONSTANT_Class:
  1.1231 +      case JVM_CONSTANT_UnresolvedClass:
  1.1232 +        // revert the copy to JVM_CONSTANT_UnresolvedClass
  1.1233 +        // May be resolving while calling this so do the same for
  1.1234 +        // JVM_CONSTANT_UnresolvedClass (klass_name_at() deals with transition)
  1.1235 +        (*merge_cp_p)->unresolved_klass_at_put(old_i,
  1.1236 +          old_cp->klass_name_at(old_i));
  1.1237 +        break;
  1.1238 +
  1.1239 +      case JVM_CONSTANT_Double:
  1.1240 +      case JVM_CONSTANT_Long:
  1.1241 +        // just copy the entry to *merge_cp_p, but double and long take
  1.1242 +        // two constant pool entries
  1.1243 +        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
  1.1244 +        old_i++;
  1.1245 +        break;
  1.1246 +
  1.1247 +      default:
  1.1248 +        // just copy the entry to *merge_cp_p
  1.1249 +        ConstantPool::copy_entry_to(old_cp, old_i, *merge_cp_p, old_i, CHECK_0);
  1.1250 +        break;
  1.1251 +      }
  1.1252 +    } // end for each old_cp entry
  1.1253 +
  1.1254 +    ConstantPool::copy_operands(old_cp, *merge_cp_p, CHECK_0);
  1.1255 +    (*merge_cp_p)->extend_operands(scratch_cp, CHECK_0);
  1.1256 +
  1.1257 +    // We don't need to sanity check that *merge_cp_length_p is within
  1.1258 +    // *merge_cp_p bounds since we have the minimum on-entry check above.
  1.1259 +    (*merge_cp_length_p) = old_i;
  1.1260 +  }
  1.1261 +
  1.1262 +  // merge_cp_len should be the same as old_cp->length() at this point
  1.1263 +  // so this trace message is really a "warm-and-breathing" message.
  1.1264 +  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
  1.1265 +    ("after pass 0: merge_cp_len=%d", *merge_cp_length_p));
  1.1266 +
  1.1267 +  int scratch_i;  // index into scratch_cp
  1.1268 +  {
  1.1269 +    // Pass 1a:
  1.1270 +    // Compare scratch_cp entries to the old_cp entries that we have
  1.1271 +    // already copied to *merge_cp_p. In this pass, we are eliminating
  1.1272 +    // exact duplicates (matching entry at same index) so we only
   1.1273 +    // compare entries in the common index range.
  1.1274 +    int increment = 1;
  1.1275 +    int pass1a_length = MIN2(old_cp->length(), scratch_cp->length());
  1.1276 +    for (scratch_i = 1; scratch_i < pass1a_length; scratch_i += increment) {
  1.1277 +      switch (scratch_cp->tag_at(scratch_i).value()) {
  1.1278 +      case JVM_CONSTANT_Double:
  1.1279 +      case JVM_CONSTANT_Long:
  1.1280 +        // double and long take two constant pool entries
  1.1281 +        increment = 2;
  1.1282 +        break;
  1.1283 +
  1.1284 +      default:
  1.1285 +        increment = 1;
  1.1286 +        break;
  1.1287 +      }
  1.1288 +
  1.1289 +      bool match = scratch_cp->compare_entry_to(scratch_i, *merge_cp_p,
  1.1290 +        scratch_i, CHECK_0);
  1.1291 +      if (match) {
  1.1292 +        // found a match at the same index so nothing more to do
  1.1293 +        continue;
  1.1294 +      } else if (is_unresolved_class_mismatch(scratch_cp, scratch_i,
  1.1295 +                                              *merge_cp_p, scratch_i)) {
  1.1296 +        // The mismatch in compare_entry_to() above is because of a
  1.1297 +        // resolved versus unresolved class entry at the same index
  1.1298 +        // with the same string value. Since Pass 0 reverted any
  1.1299 +        // class entries to unresolved class entries in *merge_cp_p,
  1.1300 +        // we go with the unresolved class entry.
  1.1301 +        continue;
  1.1302 +      }
  1.1303 +
  1.1304 +      int found_i = scratch_cp->find_matching_entry(scratch_i, *merge_cp_p,
  1.1305 +        CHECK_0);
  1.1306 +      if (found_i != 0) {
  1.1307 +        guarantee(found_i != scratch_i,
  1.1308 +          "compare_entry_to() and find_matching_entry() do not agree");
  1.1309 +
  1.1310 +        // Found a matching entry somewhere else in *merge_cp_p so
  1.1311 +        // just need a mapping entry.
  1.1312 +        map_index(scratch_cp, scratch_i, found_i);
  1.1313 +        continue;
  1.1314 +      }
  1.1315 +
  1.1316 +      // The find_matching_entry() call above could fail to find a match
  1.1317 +      // due to a resolved versus unresolved class or string entry situation
  1.1318 +      // like we solved above with the is_unresolved_*_mismatch() calls.
  1.1319 +      // However, we would have to call is_unresolved_*_mismatch() over
  1.1320 +      // all of *merge_cp_p (potentially) and that doesn't seem to be
  1.1321 +      // worth the time.
  1.1322 +
  1.1323 +      // No match found so we have to append this entry and any unique
  1.1324 +      // referenced entries to *merge_cp_p.
  1.1325 +      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
  1.1326 +        CHECK_0);
  1.1327 +    }
  1.1328 +  }
  1.1329 +
  1.1330 +  RC_TRACE_WITH_THREAD(0x00020000, THREAD,
  1.1331 +    ("after pass 1a: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
  1.1332 +    *merge_cp_length_p, scratch_i, _index_map_count));
  1.1333 +
  1.1334 +  if (scratch_i < scratch_cp->length()) {
  1.1335 +    // Pass 1b:
  1.1336 +    // old_cp is smaller than scratch_cp so there are entries in
  1.1337 +    // scratch_cp that we have not yet processed. We take care of
  1.1338 +    // those now.
  1.1339 +    int increment = 1;
  1.1340 +    for (; scratch_i < scratch_cp->length(); scratch_i += increment) {
  1.1341 +      switch (scratch_cp->tag_at(scratch_i).value()) {
  1.1342 +      case JVM_CONSTANT_Double:
  1.1343 +      case JVM_CONSTANT_Long:
  1.1344 +        // double and long take two constant pool entries
  1.1345 +        increment = 2;
  1.1346 +        break;
  1.1347 +
  1.1348 +      default:
  1.1349 +        increment = 1;
  1.1350 +        break;
  1.1351 +      }
  1.1352 +
  1.1353 +      int found_i =
  1.1354 +        scratch_cp->find_matching_entry(scratch_i, *merge_cp_p, CHECK_0);
  1.1355 +      if (found_i != 0) {
  1.1356 +        // Found a matching entry somewhere else in *merge_cp_p so
  1.1357 +        // just need a mapping entry.
  1.1358 +        map_index(scratch_cp, scratch_i, found_i);
  1.1359 +        continue;
  1.1360 +      }
  1.1361 +
  1.1362 +      // No match found so we have to append this entry and any unique
  1.1363 +      // referenced entries to *merge_cp_p.
  1.1364 +      append_entry(scratch_cp, scratch_i, merge_cp_p, merge_cp_length_p,
  1.1365 +        CHECK_0);
  1.1366 +    }
  1.1367 +
  1.1368 +    RC_TRACE_WITH_THREAD(0x00020000, THREAD,
  1.1369 +      ("after pass 1b: merge_cp_len=%d, scratch_i=%d, index_map_len=%d",
  1.1370 +      *merge_cp_length_p, scratch_i, _index_map_count));
  1.1371 +  }
  1.1372 +  finalize_operands_merge(*merge_cp_p, THREAD);
  1.1373 +
  1.1374 +  return true;
  1.1375 +} // end merge_constant_pools()
  1.1376 +
  1.1377 +
  1.1378 +// Scoped object to clean up the constant pool(s) created for merging
  1.1379 +class MergeCPCleaner {
  1.1380 +  ClassLoaderData*   _loader_data;
  1.1381 +  ConstantPool*      _cp;
  1.1382 +  ConstantPool*      _scratch_cp;
  1.1383 + public:
  1.1384 +  MergeCPCleaner(ClassLoaderData* loader_data, ConstantPool* merge_cp) :
  1.1385 +                 _loader_data(loader_data), _cp(merge_cp), _scratch_cp(NULL) {}
  1.1386 +  ~MergeCPCleaner() {
  1.1387 +    _loader_data->add_to_deallocate_list(_cp);
  1.1388 +    if (_scratch_cp != NULL) {
  1.1389 +      _loader_data->add_to_deallocate_list(_scratch_cp);
  1.1390 +    }
  1.1391 +  }
  1.1392 +  void add_scratch_cp(ConstantPool* scratch_cp) { _scratch_cp = scratch_cp; }
  1.1393 +};
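   +
   +// Typical use (illustrative): construct the cleaner immediately after
   +// allocating merge_cp so that every early-return error path in
   +// merge_cp_and_rewrite() queues the pool on the loader data's deallocate
   +// list; add_scratch_cp() later hands it the replaced scratch constant pool.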
  1.1394 +
  1.1395 +// Merge constant pools between the_class and scratch_class and
  1.1396 +// potentially rewrite bytecodes in scratch_class to use the merged
  1.1397 +// constant pool.
  1.1398 +jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
  1.1399 +             instanceKlassHandle the_class, instanceKlassHandle scratch_class,
  1.1400 +             TRAPS) {
  1.1401 +  // worst case merged constant pool length is old and new combined
  1.1402 +  int merge_cp_length = the_class->constants()->length()
  1.1403 +        + scratch_class->constants()->length();
  1.1404 +
  1.1405 +  // Constant pools are not easily reused so we allocate a new one
  1.1406 +  // each time.
   1.1407 +  // merge_cp is created unsafe for concurrent GC processing. It
   1.1408 +  // should be marked safe before discarding it. Even though it is
   1.1409 +  // garbage, if it crosses a card boundary it may be scanned
   1.1410 +  // in order to find the start of the first complete object on the card.
  1.1411 +  ClassLoaderData* loader_data = the_class->class_loader_data();
  1.1412 +  ConstantPool* merge_cp_oop =
  1.1413 +    ConstantPool::allocate(loader_data,
  1.1414 +                           merge_cp_length,
  1.1415 +                           CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
  1.1416 +  MergeCPCleaner cp_cleaner(loader_data, merge_cp_oop);
  1.1417 +
  1.1418 +  HandleMark hm(THREAD);  // make sure handles are cleared before
  1.1419 +                          // MergeCPCleaner clears out merge_cp_oop
  1.1420 +  constantPoolHandle merge_cp(THREAD, merge_cp_oop);
  1.1421 +
  1.1422 +  // Get constants() from the old class because it could have been rewritten
  1.1423 +  // while we were at a safepoint allocating a new constant pool.
  1.1424 +  constantPoolHandle old_cp(THREAD, the_class->constants());
  1.1425 +  constantPoolHandle scratch_cp(THREAD, scratch_class->constants());
  1.1426 +
  1.1427 +  // If the length changed, the class was redefined out from under us. Return
  1.1428 +  // an error.
  1.1429 +  if (merge_cp_length != the_class->constants()->length()
  1.1430 +         + scratch_class->constants()->length()) {
  1.1431 +    return JVMTI_ERROR_INTERNAL;
  1.1432 +  }
  1.1433 +
  1.1434 +  // Update the version number of the constant pool
  1.1435 +  merge_cp->increment_and_save_version(old_cp->version());
  1.1436 +
  1.1437 +  ResourceMark rm(THREAD);
  1.1438 +  _index_map_count = 0;
  1.1439 +  _index_map_p = new intArray(scratch_cp->length(), -1);
  1.1440 +
  1.1441 +  _operands_cur_length = ConstantPool::operand_array_length(old_cp->operands());
  1.1442 +  _operands_index_map_count = 0;
  1.1443 +  _operands_index_map_p = new intArray(
  1.1444 +    ConstantPool::operand_array_length(scratch_cp->operands()), -1);
  1.1445 +
  1.1446 +  // reference to the cp holder is needed for copy_operands()
  1.1447 +  merge_cp->set_pool_holder(scratch_class());
  1.1448 +  bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
  1.1449 +                  &merge_cp_length, THREAD);
  1.1450 +  merge_cp->set_pool_holder(NULL);
  1.1451 +
  1.1452 +  if (!result) {
  1.1453 +    // The merge can fail due to memory allocation failure or due
  1.1454 +    // to robustness checks.
  1.1455 +    return JVMTI_ERROR_INTERNAL;
  1.1456 +  }
  1.1457 +
  1.1458 +  RC_TRACE_WITH_THREAD(0x00010000, THREAD,
  1.1459 +    ("merge_cp_len=%d, index_map_len=%d", merge_cp_length, _index_map_count));
  1.1460 +
  1.1461 +  if (_index_map_count == 0) {
  1.1462 +    // there is nothing to map between the new and merged constant pools
  1.1463 +
  1.1464 +    if (old_cp->length() == scratch_cp->length()) {
  1.1465 +      // The old and new constant pools are the same length and the
  1.1466 +      // index map is empty. This means that the three constant pools
  1.1467 +      // are equivalent (but not the same). Unfortunately, the new
  1.1468 +      // constant pool has not gone through link resolution nor have
  1.1469 +      // the new class bytecodes gone through constant pool cache
  1.1470 +      // rewriting so we can't use the old constant pool with the new
  1.1471 +      // class.
  1.1472 +
  1.1473 +      // toss the merged constant pool at return
  1.1474 +    } else if (old_cp->length() < scratch_cp->length()) {
  1.1475 +      // The old constant pool has fewer entries than the new constant
  1.1476 +      // pool and the index map is empty. This means the new constant
  1.1477 +      // pool is a superset of the old constant pool. However, the old
  1.1478 +      // class bytecodes have already gone through constant pool cache
  1.1479 +      // rewriting so we can't use the new constant pool with the old
  1.1480 +      // class.
  1.1481 +
  1.1482 +      // toss the merged constant pool at return
  1.1483 +    } else {
  1.1484 +      // The old constant pool has more entries than the new constant
  1.1485 +      // pool and the index map is empty. This means that both the old
  1.1486 +      // and merged constant pools are supersets of the new constant
  1.1487 +      // pool.
  1.1488 +
  1.1489 +      // Replace the new constant pool with a shrunken copy of the
  1.1490 +      // merged constant pool
  1.1491 +      set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
  1.1492 +                            CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
  1.1493 +      // The new constant pool replaces scratch_cp so have cleaner clean it up.
  1.1494 +      // It can't be cleaned up while there are handles to it.
  1.1495 +      cp_cleaner.add_scratch_cp(scratch_cp());
  1.1496 +    }
  1.1497 +  } else {
  1.1498 +    if (RC_TRACE_ENABLED(0x00040000)) {
  1.1499 +      // don't want to loop unless we are tracing
  1.1500 +      int count = 0;
  1.1501 +      for (int i = 1; i < _index_map_p->length(); i++) {
  1.1502 +        int value = _index_map_p->at(i);
  1.1503 +
  1.1504 +        if (value != -1) {
  1.1505 +          RC_TRACE_WITH_THREAD(0x00040000, THREAD,
  1.1506 +            ("index_map[%d]: old=%d new=%d", count, i, value));
  1.1507 +          count++;
  1.1508 +        }
  1.1509 +      }
  1.1510 +    }
  1.1511 +
  1.1512 +    // We have entries mapped between the new and merged constant pools
  1.1513 +    // so we have to rewrite some constant pool references.
  1.1514 +    if (!rewrite_cp_refs(scratch_class, THREAD)) {
  1.1515 +      return JVMTI_ERROR_INTERNAL;
  1.1516 +    }
  1.1517 +
  1.1518 +    // Replace the new constant pool with a shrunken copy of the
  1.1519 +    // merged constant pool so now the rewritten bytecodes have
  1.1520 +    // valid references; the previous new constant pool will get
  1.1521 +    // GCed.
  1.1522 +    set_new_constant_pool(loader_data, scratch_class, merge_cp, merge_cp_length,
  1.1523 +                          CHECK_(JVMTI_ERROR_OUT_OF_MEMORY));
  1.1524 +    // The new constant pool replaces scratch_cp so have cleaner clean it up.
  1.1525 +    // It can't be cleaned up while there are handles to it.
  1.1526 +    cp_cleaner.add_scratch_cp(scratch_cp());
  1.1527 +  }
  1.1528 +
  1.1529 +  return JVMTI_ERROR_NONE;
  1.1530 +} // end merge_cp_and_rewrite()
  1.1531 +
  1.1532 +
  1.1533 +// Rewrite constant pool references in klass scratch_class.
  1.1534 +bool VM_RedefineClasses::rewrite_cp_refs(instanceKlassHandle scratch_class,
  1.1535 +       TRAPS) {
  1.1536 +
  1.1537 +  // rewrite constant pool references in the methods:
  1.1538 +  if (!rewrite_cp_refs_in_methods(scratch_class, THREAD)) {
  1.1539 +    // propagate failure back to caller
  1.1540 +    return false;
  1.1541 +  }
  1.1542 +
  1.1543 +  // rewrite constant pool references in the class_annotations:
  1.1544 +  if (!rewrite_cp_refs_in_class_annotations(scratch_class, THREAD)) {
  1.1545 +    // propagate failure back to caller
  1.1546 +    return false;
  1.1547 +  }
  1.1548 +
  1.1549 +  // rewrite constant pool references in the fields_annotations:
  1.1550 +  if (!rewrite_cp_refs_in_fields_annotations(scratch_class, THREAD)) {
  1.1551 +    // propagate failure back to caller
  1.1552 +    return false;
  1.1553 +  }
  1.1554 +
  1.1555 +  // rewrite constant pool references in the methods_annotations:
  1.1556 +  if (!rewrite_cp_refs_in_methods_annotations(scratch_class, THREAD)) {
  1.1557 +    // propagate failure back to caller
  1.1558 +    return false;
  1.1559 +  }
  1.1560 +
  1.1561 +  // rewrite constant pool references in the methods_parameter_annotations:
  1.1562 +  if (!rewrite_cp_refs_in_methods_parameter_annotations(scratch_class,
  1.1563 +         THREAD)) {
  1.1564 +    // propagate failure back to caller
  1.1565 +    return false;
  1.1566 +  }
  1.1567 +
  1.1568 +  // rewrite constant pool references in the methods_default_annotations:
  1.1569 +  if (!rewrite_cp_refs_in_methods_default_annotations(scratch_class,
  1.1570 +         THREAD)) {
  1.1571 +    // propagate failure back to caller
  1.1572 +    return false;
  1.1573 +  }
  1.1574 +
  1.1575 +  // rewrite source file name index:
  1.1576 +  u2 source_file_name_idx = scratch_class->source_file_name_index();
  1.1577 +  if (source_file_name_idx != 0) {
  1.1578 +    u2 new_source_file_name_idx = find_new_index(source_file_name_idx);
  1.1579 +    if (new_source_file_name_idx != 0) {
  1.1580 +      scratch_class->set_source_file_name_index(new_source_file_name_idx);
  1.1581 +    }
  1.1582 +  }
  1.1583 +
  1.1584 +  // rewrite class generic signature index:
  1.1585 +  u2 generic_signature_index = scratch_class->generic_signature_index();
  1.1586 +  if (generic_signature_index != 0) {
  1.1587 +    u2 new_generic_signature_index = find_new_index(generic_signature_index);
  1.1588 +    if (new_generic_signature_index != 0) {
  1.1589 +      scratch_class->set_generic_signature_index(new_generic_signature_index);
  1.1590 +    }
  1.1591 +  }
  1.1592 +
  1.1593 +  return true;
  1.1594 +} // end rewrite_cp_refs()
  1.1595 +
  1.1596 +// Rewrite constant pool references in the methods.
  1.1597 +bool VM_RedefineClasses::rewrite_cp_refs_in_methods(
  1.1598 +       instanceKlassHandle scratch_class, TRAPS) {
  1.1599 +
  1.1600 +  Array<Method*>* methods = scratch_class->methods();
  1.1601 +
  1.1602 +  if (methods == NULL || methods->length() == 0) {
  1.1603 +    // no methods so nothing to do
  1.1604 +    return true;
  1.1605 +  }
  1.1606 +
  1.1607 +  // rewrite constant pool references in the methods:
  1.1608 +  for (int i = methods->length() - 1; i >= 0; i--) {
  1.1609 +    methodHandle method(THREAD, methods->at(i));
  1.1610 +    methodHandle new_method;
  1.1611 +    rewrite_cp_refs_in_method(method, &new_method, THREAD);
  1.1612 +    if (!new_method.is_null()) {
  1.1613 +      // the method has been replaced so save the new method version
  1.1614 +      // even in the case of an exception.  original method is on the
  1.1615 +      // deallocation list.
  1.1616 +      methods->at_put(i, new_method());
  1.1617 +    }
  1.1618 +    if (HAS_PENDING_EXCEPTION) {
  1.1619 +      Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
  1.1620 +      // RC_TRACE_WITH_THREAD macro has an embedded ResourceMark
  1.1621 +      RC_TRACE_WITH_THREAD(0x00000002, THREAD,
  1.1622 +        ("rewrite_cp_refs_in_method exception: '%s'", ex_name->as_C_string()));
  1.1623 +      // Need to clear pending exception here as the super caller sets
  1.1624 +      // the JVMTI_ERROR_INTERNAL if the returned value is false.
  1.1625 +      CLEAR_PENDING_EXCEPTION;
  1.1626 +      return false;
  1.1627 +    }
  1.1628 +  }
  1.1629 +
  1.1630 +  return true;
  1.1631 +}
  1.1632 +
  1.1633 +
  1.1634 +// Rewrite constant pool references in the specific method. This code
  1.1635 +// was adapted from Rewriter::rewrite_method().
  1.1636 +void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
  1.1637 +       methodHandle *new_method_p, TRAPS) {
  1.1638 +
  1.1639 +  *new_method_p = methodHandle();  // default is no new method
  1.1640 +
  1.1641 +  // We cache a pointer to the bytecodes here in code_base. If GC
  1.1642 +  // moves the Method*, then the bytecodes will also move which
  1.1643 +  // will likely cause a crash. We create a No_Safepoint_Verifier
  1.1644 +  // object to detect whether we pass a possible safepoint in this
  1.1645 +  // code block.
  1.1646 +  No_Safepoint_Verifier nsv;
  1.1647 +
  1.1648 +  // Bytecodes and their length
  1.1649 +  address code_base = method->code_base();
  1.1650 +  int code_length = method->code_size();
  1.1651 +
  1.1652 +  int bc_length;
  1.1653 +  for (int bci = 0; bci < code_length; bci += bc_length) {
  1.1654 +    address bcp = code_base + bci;
  1.1655 +    Bytecodes::Code c = (Bytecodes::Code)(*bcp);
  1.1656 +
  1.1657 +    bc_length = Bytecodes::length_for(c);
  1.1658 +    if (bc_length == 0) {
  1.1659 +      // More complicated bytecodes report a length of zero so
  1.1660 +      // we have to try again a slightly different way.
  1.1661 +      bc_length = Bytecodes::length_at(method(), bcp);
  1.1662 +    }
  1.1663 +
  1.1664 +    assert(bc_length != 0, "impossible bytecode length");
  1.1665 +
  1.1666 +    switch (c) {
  1.1667 +      case Bytecodes::_ldc:
  1.1668 +      {
  1.1669 +        int cp_index = *(bcp + 1);
  1.1670 +        int new_index = find_new_index(cp_index);
  1.1671 +
  1.1672 +        if (StressLdcRewrite && new_index == 0) {
  1.1673 +          // If we are stressing ldc -> ldc_w rewriting, then we
  1.1674 +          // always need a new_index value.
  1.1675 +          new_index = cp_index;
  1.1676 +        }
  1.1677 +        if (new_index != 0) {
  1.1678 +          // the original index is mapped so we have more work to do
  1.1679 +          if (!StressLdcRewrite && new_index <= max_jubyte) {
  1.1680 +            // The new value can still use ldc instead of ldc_w
  1.1681 +            // unless we are trying to stress ldc -> ldc_w rewriting
  1.1682 +            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.1683 +              ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
  1.1684 +              bcp, cp_index, new_index));
  1.1685 +            *(bcp + 1) = new_index;
  1.1686 +          } else {
  1.1687 +            RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.1688 +              ("%s->ldc_w@" INTPTR_FORMAT " old=%d, new=%d",
  1.1689 +              Bytecodes::name(c), bcp, cp_index, new_index));
  1.1690 +            // the new value needs ldc_w instead of ldc
  1.1691 +            u_char inst_buffer[4]; // max instruction size is 4 bytes
  1.1692 +            bcp = (address)inst_buffer;
  1.1693 +            // construct new instruction sequence
  1.1694 +            *bcp = Bytecodes::_ldc_w;
  1.1695 +            bcp++;
  1.1696 +            // Rewriter::rewrite_method() does not rewrite ldc -> ldc_w.
  1.1697 +            // See comment below for difference between put_Java_u2()
  1.1698 +            // and put_native_u2().
  1.1699 +            Bytes::put_Java_u2(bcp, new_index);
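   +            // Illustrative layout: for new_index 0x0123 the buffer now
   +            // holds { 0x13 /* ldc_w */, 0x01, 0x23 }, i.e. the index
   +            // bytes in Java (big-endian) order.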
  1.1700 +
  1.1701 +            Relocator rc(method, NULL /* no RelocatorListener needed */);
  1.1702 +            methodHandle m;
  1.1703 +            {
  1.1704 +              Pause_No_Safepoint_Verifier pnsv(&nsv);
  1.1705 +
  1.1706 +              // ldc is 2 bytes and ldc_w is 3 bytes
  1.1707 +              m = rc.insert_space_at(bci, 3, inst_buffer, CHECK);
  1.1708 +            }
  1.1709 +
  1.1710 +            // return the new method so that the caller can update
  1.1711 +            // the containing class
  1.1712 +            *new_method_p = method = m;
  1.1713 +            // switch our bytecode processing loop from the old method
  1.1714 +            // to the new method
  1.1715 +            code_base = method->code_base();
  1.1716 +            code_length = method->code_size();
  1.1717 +            bcp = code_base + bci;
  1.1718 +            c = (Bytecodes::Code)(*bcp);
  1.1719 +            bc_length = Bytecodes::length_for(c);
  1.1720 +            assert(bc_length != 0, "sanity check");
  1.1721 +          } // end we need ldc_w instead of ldc
  1.1722 +        } // end if there is a mapped index
  1.1723 +      } break;
  1.1724 +
  1.1725 +      // these bytecodes have a two-byte constant pool index
  1.1726 +      case Bytecodes::_anewarray      : // fall through
  1.1727 +      case Bytecodes::_checkcast      : // fall through
  1.1728 +      case Bytecodes::_getfield       : // fall through
  1.1729 +      case Bytecodes::_getstatic      : // fall through
  1.1730 +      case Bytecodes::_instanceof     : // fall through
  1.1731 +      case Bytecodes::_invokedynamic  : // fall through
  1.1732 +      case Bytecodes::_invokeinterface: // fall through
  1.1733 +      case Bytecodes::_invokespecial  : // fall through
  1.1734 +      case Bytecodes::_invokestatic   : // fall through
  1.1735 +      case Bytecodes::_invokevirtual  : // fall through
  1.1736 +      case Bytecodes::_ldc_w          : // fall through
  1.1737 +      case Bytecodes::_ldc2_w         : // fall through
  1.1738 +      case Bytecodes::_multianewarray : // fall through
  1.1739 +      case Bytecodes::_new            : // fall through
  1.1740 +      case Bytecodes::_putfield       : // fall through
  1.1741 +      case Bytecodes::_putstatic      :
  1.1742 +      {
  1.1743 +        address p = bcp + 1;
  1.1744 +        int cp_index = Bytes::get_Java_u2(p);
  1.1745 +        int new_index = find_new_index(cp_index);
  1.1746 +        if (new_index != 0) {
  1.1747 +          // the original index is mapped so update w/ new value
  1.1748 +          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.1749 +            ("%s@" INTPTR_FORMAT " old=%d, new=%d", Bytecodes::name(c),
  1.1750 +            bcp, cp_index, new_index));
  1.1751 +          // Rewriter::rewrite_method() uses put_native_u2() in this
  1.1752 +          // situation because it is reusing the constant pool index
  1.1753 +          // location for a native index into the ConstantPoolCache.
  1.1754 +          // Since we are updating the constant pool index prior to
  1.1755 +          // verification and ConstantPoolCache initialization, we
  1.1756 +          // need to keep the new index in Java byte order.
  1.1757 +          Bytes::put_Java_u2(p, new_index);
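   +          // For example (illustrative), new_index 0x0102 is written as
   +          // the bytes 0x01 0x02 regardless of host endianness.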
  1.1758 +        }
  1.1759 +      } break;
  1.1760 +    }
  1.1761 +  } // end for each bytecode
  1.1762 +
  1.1763 +  // We also need to rewrite the parameter name indexes, if there is
  1.1764 +  // method parameter data present
   1.1765 +  if (method->has_method_parameters()) {
  1.1766 +    const int len = method->method_parameters_length();
  1.1767 +    MethodParametersElement* elem = method->method_parameters_start();
  1.1768 +
  1.1769 +    for (int i = 0; i < len; i++) {
  1.1770 +      const u2 cp_index = elem[i].name_cp_index;
  1.1771 +      const u2 new_cp_index = find_new_index(cp_index);
  1.1772 +      if (new_cp_index != 0) {
  1.1773 +        elem[i].name_cp_index = new_cp_index;
  1.1774 +      }
  1.1775 +    }
  1.1776 +  }
  1.1777 +} // end rewrite_cp_refs_in_method()
  1.1778 +
  1.1779 +
  1.1780 +// Rewrite constant pool references in the class_annotations field.
  1.1781 +bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(
  1.1782 +       instanceKlassHandle scratch_class, TRAPS) {
  1.1783 +
  1.1784 +  AnnotationArray* class_annotations = scratch_class->class_annotations();
  1.1785 +  if (class_annotations == NULL || class_annotations->length() == 0) {
  1.1786 +    // no class_annotations so nothing to do
  1.1787 +    return true;
  1.1788 +  }
  1.1789 +
  1.1790 +  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1791 +    ("class_annotations length=%d", class_annotations->length()));
  1.1792 +
  1.1793 +  int byte_i = 0;  // byte index into class_annotations
  1.1794 +  return rewrite_cp_refs_in_annotations_typeArray(class_annotations, byte_i,
  1.1795 +           THREAD);
  1.1796 +}
  1.1797 +
  1.1798 +
  1.1799 +// Rewrite constant pool references in an annotations typeArray. This
  1.1800 +// "structure" is adapted from the RuntimeVisibleAnnotations_attribute
  1.1801 +// that is described in section 4.8.15 of the 2nd-edition of the VM spec:
  1.1802 +//
  1.1803 +// annotations_typeArray {
  1.1804 +//   u2 num_annotations;
  1.1805 +//   annotation annotations[num_annotations];
  1.1806 +// }
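   +//
   +// For example (illustrative), the bytes { 0x00, 0x02, <annotation>,
   +// <annotation> } encode num_annotations=2 followed by two annotation
   +// structures.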
  1.1807 +//
  1.1808 +bool VM_RedefineClasses::rewrite_cp_refs_in_annotations_typeArray(
  1.1809 +       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
  1.1810 +
  1.1811 +  if ((byte_i_ref + 2) > annotations_typeArray->length()) {
  1.1812 +    // not enough room for num_annotations field
  1.1813 +    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1814 +      ("length() is too small for num_annotations field"));
  1.1815 +    return false;
  1.1816 +  }
  1.1817 +
  1.1818 +  u2 num_annotations = Bytes::get_Java_u2((address)
  1.1819 +                         annotations_typeArray->adr_at(byte_i_ref));
  1.1820 +  byte_i_ref += 2;
  1.1821 +
  1.1822 +  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1823 +    ("num_annotations=%d", num_annotations));
  1.1824 +
  1.1825 +  int calc_num_annotations = 0;
  1.1826 +  for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
  1.1827 +    if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
  1.1828 +           byte_i_ref, THREAD)) {
  1.1829 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1830 +        ("bad annotation_struct at %d", calc_num_annotations));
  1.1831 +      // propagate failure back to caller
  1.1832 +      return false;
  1.1833 +    }
  1.1834 +  }
  1.1835 +  assert(num_annotations == calc_num_annotations, "sanity check");
  1.1836 +
  1.1837 +  return true;
  1.1838 +} // end rewrite_cp_refs_in_annotations_typeArray()
  1.1839 +
  1.1840 +
  1.1841 +// Rewrite constant pool references in the annotation struct portion of
  1.1842 +// an annotations_typeArray. This "structure" is from section 4.8.15 of
  1.1843 +// the 2nd-edition of the VM spec:
  1.1844 +//
  1.1845 +// struct annotation {
  1.1846 +//   u2 type_index;
  1.1847 +//   u2 num_element_value_pairs;
  1.1848 +//   {
  1.1849 +//     u2 element_name_index;
  1.1850 +//     element_value value;
  1.1851 +//   } element_value_pairs[num_element_value_pairs];
  1.1852 +// }
  1.1853 +//
  1.1854 +bool VM_RedefineClasses::rewrite_cp_refs_in_annotation_struct(
  1.1855 +       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
  1.1856 +  if ((byte_i_ref + 2 + 2) > annotations_typeArray->length()) {
  1.1857 +    // not enough room for smallest annotation_struct
  1.1858 +    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1859 +      ("length() is too small for annotation_struct"));
  1.1860 +    return false;
  1.1861 +  }
  1.1862 +
  1.1863 +  u2 type_index = rewrite_cp_ref_in_annotation_data(annotations_typeArray,
  1.1864 +                    byte_i_ref, "mapped old type_index=%d", THREAD);
  1.1865 +
  1.1866 +  u2 num_element_value_pairs = Bytes::get_Java_u2((address)
  1.1867 +                                 annotations_typeArray->adr_at(byte_i_ref));
  1.1868 +  byte_i_ref += 2;
  1.1869 +
  1.1870 +  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1871 +    ("type_index=%d  num_element_value_pairs=%d", type_index,
  1.1872 +    num_element_value_pairs));
  1.1873 +
  1.1874 +  int calc_num_element_value_pairs = 0;
  1.1875 +  for (; calc_num_element_value_pairs < num_element_value_pairs;
  1.1876 +       calc_num_element_value_pairs++) {
  1.1877 +    if ((byte_i_ref + 2) > annotations_typeArray->length()) {
  1.1878 +      // not enough room for another element_name_index, let alone
  1.1879 +      // the rest of another component
  1.1880 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1881 +        ("length() is too small for element_name_index"));
  1.1882 +      return false;
  1.1883 +    }
  1.1884 +
  1.1885 +    u2 element_name_index = rewrite_cp_ref_in_annotation_data(
  1.1886 +                              annotations_typeArray, byte_i_ref,
  1.1887 +                              "mapped old element_name_index=%d", THREAD);
  1.1888 +
  1.1889 +    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1890 +      ("element_name_index=%d", element_name_index));
  1.1891 +
  1.1892 +    if (!rewrite_cp_refs_in_element_value(annotations_typeArray,
  1.1893 +           byte_i_ref, THREAD)) {
  1.1894 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1895 +        ("bad element_value at %d", calc_num_element_value_pairs));
  1.1896 +      // propagate failure back to caller
  1.1897 +      return false;
  1.1898 +    }
  1.1899 +  } // end for each component
  1.1900 +  assert(num_element_value_pairs == calc_num_element_value_pairs,
  1.1901 +    "sanity check");
  1.1902 +
  1.1903 +  return true;
  1.1904 +} // end rewrite_cp_refs_in_annotation_struct()
  1.1905 +
  1.1906 +
  1.1907 +// Rewrite a constant pool reference at the current position in
  1.1908 +// annotations_typeArray if needed. Returns the original constant
  1.1909 +// pool reference if a rewrite was not needed or the new constant
  1.1910 +// pool reference if a rewrite was needed.
  1.1911 +PRAGMA_DIAG_PUSH
  1.1912 +PRAGMA_FORMAT_NONLITERAL_IGNORED
  1.1913 +u2 VM_RedefineClasses::rewrite_cp_ref_in_annotation_data(
  1.1914 +     AnnotationArray* annotations_typeArray, int &byte_i_ref,
  1.1915 +     const char * trace_mesg, TRAPS) {
  1.1916 +
  1.1917 +  address cp_index_addr = (address)
  1.1918 +    annotations_typeArray->adr_at(byte_i_ref);
  1.1919 +  u2 old_cp_index = Bytes::get_Java_u2(cp_index_addr);
  1.1920 +  u2 new_cp_index = find_new_index(old_cp_index);
  1.1921 +  if (new_cp_index != 0) {
  1.1922 +    RC_TRACE_WITH_THREAD(0x02000000, THREAD, (trace_mesg, old_cp_index));
  1.1923 +    Bytes::put_Java_u2(cp_index_addr, new_cp_index);
  1.1924 +    old_cp_index = new_cp_index;
  1.1925 +  }
  1.1926 +  byte_i_ref += 2;
  1.1927 +  return old_cp_index;
  1.1928 +}
  1.1929 +PRAGMA_DIAG_POP
  1.1930 +
  1.1931 +
  1.1932 +// Rewrite constant pool references in the element_value portion of an
  1.1933 +// annotations_typeArray. This "structure" is from section 4.8.15.1 of
  1.1934 +// the 2nd-edition of the VM spec:
  1.1935 +//
  1.1936 +// struct element_value {
  1.1937 +//   u1 tag;
  1.1938 +//   union {
  1.1939 +//     u2 const_value_index;
  1.1940 +//     {
  1.1941 +//       u2 type_name_index;
  1.1942 +//       u2 const_name_index;
  1.1943 +//     } enum_const_value;
  1.1944 +//     u2 class_info_index;
  1.1945 +//     annotation annotation_value;
  1.1946 +//     struct {
  1.1947 +//       u2 num_values;
  1.1948 +//       element_value values[num_values];
  1.1949 +//     } array_value;
  1.1950 +//   } value;
  1.1951 +// }
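   +//
   +// For example (illustrative), the bytes { 's', 0x00, 0x05 } encode a
   +// String-valued element_value whose CONSTANT_Utf8 entry lives at
   +// constant pool index 5 and may be remapped here.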
  1.1952 +//
  1.1953 +bool VM_RedefineClasses::rewrite_cp_refs_in_element_value(
  1.1954 +       AnnotationArray* annotations_typeArray, int &byte_i_ref, TRAPS) {
  1.1955 +
  1.1956 +  if ((byte_i_ref + 1) > annotations_typeArray->length()) {
  1.1957 +    // not enough room for a tag let alone the rest of an element_value
  1.1958 +    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1959 +      ("length() is too small for a tag"));
  1.1960 +    return false;
  1.1961 +  }
  1.1962 +
  1.1963 +  u1 tag = annotations_typeArray->at(byte_i_ref);
  1.1964 +  byte_i_ref++;
  1.1965 +  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("tag='%c'", tag));
  1.1966 +
  1.1967 +  switch (tag) {
  1.1968 +    // These BaseType tag values are from Table 4.2 in VM spec:
  1.1969 +    case 'B':  // byte
  1.1970 +    case 'C':  // char
  1.1971 +    case 'D':  // double
  1.1972 +    case 'F':  // float
  1.1973 +    case 'I':  // int
  1.1974 +    case 'J':  // long
  1.1975 +    case 'S':  // short
  1.1976 +    case 'Z':  // boolean
  1.1977 +
  1.1978 +    // The remaining tag values are from Table 4.8 in the 2nd-edition of
  1.1979 +    // the VM spec:
  1.1980 +    case 's':
  1.1981 +    {
  1.1982 +      // For the above tag values (including the BaseType values),
   1.1983 +      // value.const_value_index is the right union field.
  1.1984 +
  1.1985 +      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
  1.1986 +        // not enough room for a const_value_index
  1.1987 +        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1988 +          ("length() is too small for a const_value_index"));
  1.1989 +        return false;
  1.1990 +      }
  1.1991 +
  1.1992 +      u2 const_value_index = rewrite_cp_ref_in_annotation_data(
  1.1993 +                               annotations_typeArray, byte_i_ref,
  1.1994 +                               "mapped old const_value_index=%d", THREAD);
  1.1995 +
  1.1996 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.1997 +        ("const_value_index=%d", const_value_index));
  1.1998 +    } break;
  1.1999 +
  1.2000 +    case 'e':
  1.2001 +    {
   1.2002 +      // for the above tag value, value.enum_const_value is the right union field
  1.2003 +
  1.2004 +      if ((byte_i_ref + 4) > annotations_typeArray->length()) {
   1.2005 +        // not enough room for an enum_const_value
   1.2006 +        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
   1.2007 +          ("length() is too small for an enum_const_value"));
  1.2008 +        return false;
  1.2009 +      }
  1.2010 +
  1.2011 +      u2 type_name_index = rewrite_cp_ref_in_annotation_data(
  1.2012 +                             annotations_typeArray, byte_i_ref,
  1.2013 +                             "mapped old type_name_index=%d", THREAD);
  1.2014 +
  1.2015 +      u2 const_name_index = rewrite_cp_ref_in_annotation_data(
  1.2016 +                              annotations_typeArray, byte_i_ref,
  1.2017 +                              "mapped old const_name_index=%d", THREAD);
  1.2018 +
  1.2019 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2020 +        ("type_name_index=%d  const_name_index=%d", type_name_index,
  1.2021 +        const_name_index));
  1.2022 +    } break;
  1.2023 +
  1.2024 +    case 'c':
  1.2025 +    {
   1.2026 +      // for the above tag value, value.class_info_index is the right union field
  1.2027 +
  1.2028 +      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
  1.2029 +        // not enough room for a class_info_index
  1.2030 +        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2031 +          ("length() is too small for a class_info_index"));
  1.2032 +        return false;
  1.2033 +      }
  1.2034 +
  1.2035 +      u2 class_info_index = rewrite_cp_ref_in_annotation_data(
  1.2036 +                              annotations_typeArray, byte_i_ref,
  1.2037 +                              "mapped old class_info_index=%d", THREAD);
  1.2038 +
  1.2039 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2040 +        ("class_info_index=%d", class_info_index));
  1.2041 +    } break;
  1.2042 +
  1.2043 +    case '@':
   1.2044 +      // For the above tag value, value.annotation_value is the right
   1.2045 +      // union field. This is a nested annotation.
  1.2046 +      if (!rewrite_cp_refs_in_annotation_struct(annotations_typeArray,
  1.2047 +             byte_i_ref, THREAD)) {
  1.2048 +        // propagate failure back to caller
  1.2049 +        return false;
  1.2050 +      }
  1.2051 +      break;
  1.2052 +
  1.2053 +    case '[':
  1.2054 +    {
  1.2055 +      if ((byte_i_ref + 2) > annotations_typeArray->length()) {
  1.2056 +        // not enough room for a num_values field
  1.2057 +        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2058 +          ("length() is too small for a num_values field"));
  1.2059 +        return false;
  1.2060 +      }
  1.2061 +
  1.2062 +      // For the above tag value, value.array_value is the right union
  1.2063 +      // field. This is an array of nested element_value.
  1.2064 +      u2 num_values = Bytes::get_Java_u2((address)
  1.2065 +                        annotations_typeArray->adr_at(byte_i_ref));
  1.2066 +      byte_i_ref += 2;
  1.2067 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("num_values=%d", num_values));
  1.2068 +
  1.2069 +      int calc_num_values = 0;
  1.2070 +      for (; calc_num_values < num_values; calc_num_values++) {
  1.2071 +        if (!rewrite_cp_refs_in_element_value(
  1.2072 +               annotations_typeArray, byte_i_ref, THREAD)) {
  1.2073 +          RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2074 +            ("bad nested element_value at %d", calc_num_values));
  1.2075 +          // propagate failure back to caller
  1.2076 +          return false;
  1.2077 +        }
  1.2078 +      }
  1.2079 +      assert(num_values == calc_num_values, "sanity check");
  1.2080 +    } break;
  1.2081 +
  1.2082 +    default:
  1.2083 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("bad tag=0x%x", tag));
  1.2084 +      return false;
  1.2085 +  } // end decode tag field
  1.2086 +
  1.2087 +  return true;
  1.2088 +} // end rewrite_cp_refs_in_element_value()
  1.2089 +
  1.2090 +
  1.2091 +// Rewrite constant pool references in a fields_annotations field.
  1.2092 +bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
  1.2093 +       instanceKlassHandle scratch_class, TRAPS) {
  1.2094 +
  1.2095 +  Array<AnnotationArray*>* fields_annotations = scratch_class->fields_annotations();
  1.2096 +
  1.2097 +  if (fields_annotations == NULL || fields_annotations->length() == 0) {
  1.2098 +    // no fields_annotations so nothing to do
  1.2099 +    return true;
  1.2100 +  }
  1.2101 +
  1.2102 +  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2103 +    ("fields_annotations length=%d", fields_annotations->length()));
  1.2104 +
  1.2105 +  for (int i = 0; i < fields_annotations->length(); i++) {
  1.2106 +    AnnotationArray* field_annotations = fields_annotations->at(i);
  1.2107 +    if (field_annotations == NULL || field_annotations->length() == 0) {
  1.2108 +      // this field does not have any annotations so skip it
  1.2109 +      continue;
  1.2110 +    }
  1.2111 +
  1.2112 +    int byte_i = 0;  // byte index into field_annotations
  1.2113 +    if (!rewrite_cp_refs_in_annotations_typeArray(field_annotations, byte_i,
  1.2114 +           THREAD)) {
  1.2115 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2116 +        ("bad field_annotations at %d", i));
  1.2117 +      // propagate failure back to caller
  1.2118 +      return false;
  1.2119 +    }
  1.2120 +  }
  1.2121 +
  1.2122 +  return true;
  1.2123 +} // end rewrite_cp_refs_in_fields_annotations()
  1.2124 +
  1.2125 +
  1.2126 +// Rewrite constant pool references in a methods_annotations field.
  1.2127 +bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
  1.2128 +       instanceKlassHandle scratch_class, TRAPS) {
  1.2129 +
  1.2130 +  for (int i = 0; i < scratch_class->methods()->length(); i++) {
  1.2131 +    Method* m = scratch_class->methods()->at(i);
  1.2132 +    AnnotationArray* method_annotations = m->constMethod()->method_annotations();
  1.2133 +
  1.2134 +    if (method_annotations == NULL || method_annotations->length() == 0) {
  1.2135 +      // this method does not have any annotations so skip it
  1.2136 +      continue;
  1.2137 +    }
  1.2138 +
  1.2139 +    int byte_i = 0;  // byte index into method_annotations
  1.2140 +    if (!rewrite_cp_refs_in_annotations_typeArray(method_annotations, byte_i,
  1.2141 +           THREAD)) {
  1.2142 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2143 +        ("bad method_annotations at %d", i));
  1.2144 +      // propagate failure back to caller
  1.2145 +      return false;
  1.2146 +    }
  1.2147 +  }
  1.2148 +
  1.2149 +  return true;
  1.2150 +} // end rewrite_cp_refs_in_methods_annotations()
  1.2151 +
  1.2152 +
  1.2153 +// Rewrite constant pool references in a methods_parameter_annotations
  1.2154 +// field. This "structure" is adapted from the
  1.2155 +// RuntimeVisibleParameterAnnotations_attribute described in section
  1.2156 +// 4.8.17 of the 2nd-edition of the VM spec:
  1.2157 +//
  1.2158 +// methods_parameter_annotations_typeArray {
  1.2159 +//   u1 num_parameters;
  1.2160 +//   {
  1.2161 +//     u2 num_annotations;
  1.2162 +//     annotation annotations[num_annotations];
  1.2163 +//   } parameter_annotations[num_parameters];
  1.2164 +// }
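   +//
   +// For example (illustrative), { 0x02, <param 0 annotations>, <param 1
   +// annotations> } describes a method with annotations recorded for two
   +// parameters.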
  1.2165 +//
  1.2166 +bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
  1.2167 +       instanceKlassHandle scratch_class, TRAPS) {
  1.2168 +
  1.2169 +  for (int i = 0; i < scratch_class->methods()->length(); i++) {
  1.2170 +    Method* m = scratch_class->methods()->at(i);
  1.2171 +    AnnotationArray* method_parameter_annotations = m->constMethod()->parameter_annotations();
  1.2172 +    if (method_parameter_annotations == NULL
  1.2173 +        || method_parameter_annotations->length() == 0) {
  1.2174 +      // this method does not have any parameter annotations so skip it
  1.2175 +      continue;
  1.2176 +    }
  1.2177 +
  1.2178 +    if (method_parameter_annotations->length() < 1) {
  1.2179 +      // not enough room for a num_parameters field
  1.2180 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2181 +        ("length() is too small for a num_parameters field at %d", i));
  1.2182 +      return false;
  1.2183 +    }
  1.2184 +
  1.2185 +    int byte_i = 0;  // byte index into method_parameter_annotations
  1.2186 +
  1.2187 +    u1 num_parameters = method_parameter_annotations->at(byte_i);
  1.2188 +    byte_i++;
  1.2189 +
  1.2190 +    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2191 +      ("num_parameters=%d", num_parameters));
  1.2192 +
  1.2193 +    int calc_num_parameters = 0;
  1.2194 +    for (; calc_num_parameters < num_parameters; calc_num_parameters++) {
  1.2195 +      if (!rewrite_cp_refs_in_annotations_typeArray(
  1.2196 +             method_parameter_annotations, byte_i, THREAD)) {
  1.2197 +        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2198 +          ("bad method_parameter_annotations at %d", calc_num_parameters));
  1.2199 +        // propagate failure back to caller
  1.2200 +        return false;
  1.2201 +      }
  1.2202 +    }
  1.2203 +    assert(num_parameters == calc_num_parameters, "sanity check");
  1.2204 +  }
  1.2205 +
  1.2206 +  return true;
  1.2207 +} // end rewrite_cp_refs_in_methods_parameter_annotations()
  1.2208 +
  1.2209 +
  1.2210 +// Rewrite constant pool references in a methods_default_annotations
  1.2211 +// field. This "structure" is adapted from the AnnotationDefault_attribute
  1.2212 +// that is described in section 4.8.19 of the 2nd-edition of the VM spec:
  1.2213 +//
  1.2214 +// methods_default_annotations_typeArray {
  1.2215 +//   element_value default_value;
  1.2216 +// }
  1.2217 +//
  1.2218 +bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
  1.2219 +       instanceKlassHandle scratch_class, TRAPS) {
  1.2220 +
  1.2221 +  for (int i = 0; i < scratch_class->methods()->length(); i++) {
  1.2222 +    Method* m = scratch_class->methods()->at(i);
  1.2223 +    AnnotationArray* method_default_annotations = m->constMethod()->default_annotations();
  1.2224 +    if (method_default_annotations == NULL
  1.2225 +        || method_default_annotations->length() == 0) {
  1.2226 +      // this method does not have any default annotations so skip it
  1.2227 +      continue;
  1.2228 +    }
  1.2229 +
  1.2230 +    int byte_i = 0;  // byte index into method_default_annotations
  1.2231 +
  1.2232 +    if (!rewrite_cp_refs_in_element_value(
  1.2233 +           method_default_annotations, byte_i, THREAD)) {
  1.2234 +      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
  1.2235 +        ("bad default element_value at %d", i));
  1.2236 +      // propagate failure back to caller
  1.2237 +      return false;
  1.2238 +    }
  1.2239 +  }
  1.2240 +
  1.2241 +  return true;
  1.2242 +} // end rewrite_cp_refs_in_methods_default_annotations()
  1.2243 +
  1.2244 +
  1.2245 +// Rewrite constant pool references in the method's stackmap table.
  1.2246 +// These "structures" are adapted from the StackMapTable_attribute that
  1.2247 +// is described in section 4.8.4 of the 6.0 version of the VM spec
  1.2248 +// (dated 2005.10.26):
  1.2249 +// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
  1.2250 +//
  1.2251 +// stack_map {
  1.2252 +//   u2 number_of_entries;
  1.2253 +//   stack_map_frame entries[number_of_entries];
  1.2254 +// }
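   +//
   +// For example (illustrative), a frame_type byte of 64 denotes a
   +// same_locals_1_stack_item_frame, so exactly one verification_type_info
   +// follows; an Object_variable_info inside it carries a cpool_index that
   +// may need remapping.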
  1.2255 +//
  1.2256 +void VM_RedefineClasses::rewrite_cp_refs_in_stack_map_table(
  1.2257 +       methodHandle method, TRAPS) {
  1.2258 +
  1.2259 +  if (!method->has_stackmap_table()) {
  1.2260 +    return;
  1.2261 +  }
  1.2262 +
  1.2263 +  AnnotationArray* stackmap_data = method->stackmap_data();
  1.2264 +  address stackmap_p = (address)stackmap_data->adr_at(0);
  1.2265 +  address stackmap_end = stackmap_p + stackmap_data->length();
  1.2266 +
  1.2267 +  assert(stackmap_p + 2 <= stackmap_end, "no room for number_of_entries");
  1.2268 +  u2 number_of_entries = Bytes::get_Java_u2(stackmap_p);
  1.2269 +  stackmap_p += 2;
  1.2270 +
  1.2271 +  RC_TRACE_WITH_THREAD(0x04000000, THREAD,
  1.2272 +    ("number_of_entries=%u", number_of_entries));
  1.2273 +
  1.2274 +  // walk through each stack_map_frame
  1.2275 +  u2 calc_number_of_entries = 0;
  1.2276 +  for (; calc_number_of_entries < number_of_entries; calc_number_of_entries++) {
  1.2277 +    // The stack_map_frame structure is a u1 frame_type followed by
  1.2278 +    // 0 or more bytes of data:
  1.2279 +    //
  1.2280 +    // union stack_map_frame {
  1.2281 +    //   same_frame;
  1.2282 +    //   same_locals_1_stack_item_frame;
  1.2283 +    //   same_locals_1_stack_item_frame_extended;
  1.2284 +    //   chop_frame;
  1.2285 +    //   same_frame_extended;
  1.2286 +    //   append_frame;
  1.2287 +    //   full_frame;
  1.2288 +    // }
  1.2289 +
  1.2290 +    assert(stackmap_p + 1 <= stackmap_end, "no room for frame_type");
  1.2291 +    // The Linux compiler does not like frame_type to be u1 or u2. It
  1.2292 +    // issues the following warning for the first if-statement below:
  1.2293 +    //
  1.2294 +    // "warning: comparison is always true due to limited range of data type"
  1.2295 +    //
  1.2296 +    u4 frame_type = *stackmap_p;
  1.2297 +    stackmap_p++;
  1.2298 +
  1.2299 +    // same_frame {
  1.2300 +    //   u1 frame_type = SAME; /* 0-63 */
  1.2301 +    // }
  1.2302 +    if (frame_type >= 0 && frame_type <= 63) {
  1.2303 +      // nothing more to do for same_frame
  1.2304 +    }
  1.2305 +
  1.2306 +    // same_locals_1_stack_item_frame {
  1.2307 +    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM; /* 64-127 */
  1.2308 +    //   verification_type_info stack[1];
  1.2309 +    // }
  1.2310 +    else if (frame_type >= 64 && frame_type <= 127) {
  1.2311 +      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
  1.2312 +        calc_number_of_entries, frame_type, THREAD);
  1.2313 +    }
  1.2314 +
  1.2315 +    // reserved for future use
  1.2316 +    else if (frame_type >= 128 && frame_type <= 246) {
  1.2317 +      // nothing more to do for reserved frame_types
  1.2318 +    }
  1.2319 +
  1.2320 +    // same_locals_1_stack_item_frame_extended {
  1.2321 +    //   u1 frame_type = SAME_LOCALS_1_STACK_ITEM_EXTENDED; /* 247 */
  1.2322 +    //   u2 offset_delta;
  1.2323 +    //   verification_type_info stack[1];
  1.2324 +    // }
  1.2325 +    else if (frame_type == 247) {
  1.2326 +      stackmap_p += 2;
  1.2327 +      rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
  1.2328 +        calc_number_of_entries, frame_type, THREAD);
  1.2329 +    }
  1.2330 +
  1.2331 +    // chop_frame {
  1.2332 +    //   u1 frame_type = CHOP; /* 248-250 */
  1.2333 +    //   u2 offset_delta;
  1.2334 +    // }
  1.2335 +    else if (frame_type >= 248 && frame_type <= 250) {
  1.2336 +      stackmap_p += 2;
  1.2337 +    }
  1.2338 +
  1.2339 +    // same_frame_extended {
   1.2340 +    //   u1 frame_type = SAME_FRAME_EXTENDED; /* 251 */
  1.2341 +    //   u2 offset_delta;
  1.2342 +    // }
  1.2343 +    else if (frame_type == 251) {
  1.2344 +      stackmap_p += 2;
  1.2345 +    }
  1.2346 +
  1.2347 +    // append_frame {
  1.2348 +    //   u1 frame_type = APPEND; /* 252-254 */
  1.2349 +    //   u2 offset_delta;
  1.2350 +    //   verification_type_info locals[frame_type - 251];
  1.2351 +    // }
  1.2352 +    else if (frame_type >= 252 && frame_type <= 254) {
  1.2353 +      assert(stackmap_p + 2 <= stackmap_end,
  1.2354 +        "no room for offset_delta");
  1.2355 +      stackmap_p += 2;
  1.2356 +      u1 len = frame_type - 251;
  1.2357 +      for (u1 i = 0; i < len; i++) {
  1.2358 +        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
  1.2359 +          calc_number_of_entries, frame_type, THREAD);
  1.2360 +      }
  1.2361 +    }
  1.2362 +
  1.2363 +    // full_frame {
  1.2364 +    //   u1 frame_type = FULL_FRAME; /* 255 */
  1.2365 +    //   u2 offset_delta;
  1.2366 +    //   u2 number_of_locals;
  1.2367 +    //   verification_type_info locals[number_of_locals];
  1.2368 +    //   u2 number_of_stack_items;
  1.2369 +    //   verification_type_info stack[number_of_stack_items];
  1.2370 +    // }
  1.2371 +    else if (frame_type == 255) {
  1.2372 +      assert(stackmap_p + 2 + 2 <= stackmap_end,
  1.2373 +        "no room for smallest full_frame");
  1.2374 +      stackmap_p += 2;
  1.2375 +
  1.2376 +      u2 number_of_locals = Bytes::get_Java_u2(stackmap_p);
  1.2377 +      stackmap_p += 2;
  1.2378 +
  1.2379 +      for (u2 locals_i = 0; locals_i < number_of_locals; locals_i++) {
  1.2380 +        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
  1.2381 +          calc_number_of_entries, frame_type, THREAD);
  1.2382 +      }
  1.2383 +
   1.2384 +      // Read the two-byte number_of_stack_items and then rewrite
   1.2385 +      // each stack item that follows.
  1.2386 +      u2 number_of_stack_items = Bytes::get_Java_u2(stackmap_p);
  1.2387 +      stackmap_p += 2;
  1.2388 +
  1.2389 +      for (u2 stack_i = 0; stack_i < number_of_stack_items; stack_i++) {
  1.2390 +        rewrite_cp_refs_in_verification_type_info(stackmap_p, stackmap_end,
  1.2391 +          calc_number_of_entries, frame_type, THREAD);
  1.2392 +      }
  1.2393 +    }
   1.2394 +  } // end for each stack_map_frame
  1.2395 +  assert(number_of_entries == calc_number_of_entries, "sanity check");
  1.2396 +} // end rewrite_cp_refs_in_stack_map_table()
  1.2397 +
  1.2398 +
  1.2399 +// Rewrite constant pool references in the verification type info
  1.2400 +// portion of the method's stackmap table. These "structures" are
  1.2401 +// adapted from the StackMapTable_attribute that is described in
  1.2402 +// section 4.8.4 of the 6.0 version of the VM spec (dated 2005.10.26):
  1.2403 +// file:///net/quincunx.sfbay/export/gbracha/ClassFile-Java6.pdf
  1.2404 +//
  1.2405 +// The verification_type_info structure is a u1 tag followed by 0 or
  1.2406 +// more bytes of data:
  1.2407 +//
  1.2408 +// union verification_type_info {
  1.2409 +//   Top_variable_info;
  1.2410 +//   Integer_variable_info;
  1.2411 +//   Float_variable_info;
  1.2412 +//   Long_variable_info;
  1.2413 +//   Double_variable_info;
  1.2414 +//   Null_variable_info;
  1.2415 +//   UninitializedThis_variable_info;
  1.2416 +//   Object_variable_info;
  1.2417 +//   Uninitialized_variable_info;
  1.2418 +// }
  1.2419 +//
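          +// Only Object_variable_info (ITEM_Object, tag 7) embeds a constant
          +// pool index, so it is the only variant rewritten below. For example
          +// (illustrative only), the bytes 07 00 2A encode an Object entry
          +// whose cpool_index is 42.
          +//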
  1.2420 +void VM_RedefineClasses::rewrite_cp_refs_in_verification_type_info(
  1.2421 +       address& stackmap_p_ref, address stackmap_end, u2 frame_i,
  1.2422 +       u1 frame_type, TRAPS) {
  1.2423 +
  1.2424 +  assert(stackmap_p_ref + 1 <= stackmap_end, "no room for tag");
  1.2425 +  u1 tag = *stackmap_p_ref;
  1.2426 +  stackmap_p_ref++;
  1.2427 +
  1.2428 +  switch (tag) {
  1.2429 +  // Top_variable_info {
  1.2430 +  //   u1 tag = ITEM_Top; /* 0 */
  1.2431 +  // }
  1.2432 +  // verificationType.hpp has zero as ITEM_Bogus instead of ITEM_Top
  1.2433 +  case 0:  // fall through
  1.2434 +
  1.2435 +  // Integer_variable_info {
  1.2436 +  //   u1 tag = ITEM_Integer; /* 1 */
  1.2437 +  // }
  1.2438 +  case ITEM_Integer:  // fall through
  1.2439 +
  1.2440 +  // Float_variable_info {
  1.2441 +  //   u1 tag = ITEM_Float; /* 2 */
  1.2442 +  // }
  1.2443 +  case ITEM_Float:  // fall through
  1.2444 +
  1.2445 +  // Double_variable_info {
  1.2446 +  //   u1 tag = ITEM_Double; /* 3 */
  1.2447 +  // }
  1.2448 +  case ITEM_Double:  // fall through
  1.2449 +
  1.2450 +  // Long_variable_info {
  1.2451 +  //   u1 tag = ITEM_Long; /* 4 */
  1.2452 +  // }
  1.2453 +  case ITEM_Long:  // fall through
  1.2454 +
  1.2455 +  // Null_variable_info {
  1.2456 +  //   u1 tag = ITEM_Null; /* 5 */
  1.2457 +  // }
  1.2458 +  case ITEM_Null:  // fall through
  1.2459 +
  1.2460 +  // UninitializedThis_variable_info {
  1.2461 +  //   u1 tag = ITEM_UninitializedThis; /* 6 */
  1.2462 +  // }
  1.2463 +  case ITEM_UninitializedThis:
  1.2464 +    // nothing more to do for the above tag types
  1.2465 +    break;
  1.2466 +
  1.2467 +  // Object_variable_info {
  1.2468 +  //   u1 tag = ITEM_Object; /* 7 */
  1.2469 +  //   u2 cpool_index;
  1.2470 +  // }
  1.2471 +  case ITEM_Object:
  1.2472 +  {
  1.2473 +    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for cpool_index");
  1.2474 +    u2 cpool_index = Bytes::get_Java_u2(stackmap_p_ref);
  1.2475 +    u2 new_cp_index = find_new_index(cpool_index);
  1.2476 +    if (new_cp_index != 0) {
  1.2477 +      RC_TRACE_WITH_THREAD(0x04000000, THREAD,
  1.2478 +        ("mapped old cpool_index=%d", cpool_index));
  1.2479 +      Bytes::put_Java_u2(stackmap_p_ref, new_cp_index);
  1.2480 +      cpool_index = new_cp_index;
  1.2481 +    }
  1.2482 +    stackmap_p_ref += 2;
  1.2483 +
  1.2484 +    RC_TRACE_WITH_THREAD(0x04000000, THREAD,
  1.2485 +      ("frame_i=%u, frame_type=%u, cpool_index=%d", frame_i,
  1.2486 +      frame_type, cpool_index));
  1.2487 +  } break;
  1.2488 +
  1.2489 +  // Uninitialized_variable_info {
  1.2490 +  //   u1 tag = ITEM_Uninitialized; /* 8 */
  1.2491 +  //   u2 offset;
  1.2492 +  // }
  1.2493 +  case ITEM_Uninitialized:
  1.2494 +    assert(stackmap_p_ref + 2 <= stackmap_end, "no room for offset");
  1.2495 +    stackmap_p_ref += 2;
  1.2496 +    break;
  1.2497 +
  1.2498 +  default:
  1.2499 +    RC_TRACE_WITH_THREAD(0x04000000, THREAD,
  1.2500 +      ("frame_i=%u, frame_type=%u, bad tag=0x%x", frame_i, frame_type, tag));
  1.2501 +    ShouldNotReachHere();
  1.2502 +    break;
  1.2503 +  } // end switch (tag)
  1.2504 +} // end rewrite_cp_refs_in_verification_type_info()
  1.2505 +
  1.2506 +
   1.2507 +// Change the constant pool associated with klass scratch_class to
   1.2508 +// scratch_cp. scratch_cp_length elements are copied from scratch_cp
   1.2509 +// to a smaller constant pool, and the smaller constant pool is
   1.2510 +// associated with scratch_class.
  1.2511 +void VM_RedefineClasses::set_new_constant_pool(
  1.2512 +       ClassLoaderData* loader_data,
  1.2513 +       instanceKlassHandle scratch_class, constantPoolHandle scratch_cp,
  1.2514 +       int scratch_cp_length, TRAPS) {
  1.2515 +  assert(scratch_cp->length() >= scratch_cp_length, "sanity check");
  1.2516 +
  1.2517 +  // scratch_cp is a merged constant pool and has enough space for a
  1.2518 +  // worst case merge situation. We want to associate the minimum
  1.2519 +  // sized constant pool with the klass to save space.
  1.2520 +  ConstantPool* cp = ConstantPool::allocate(loader_data, scratch_cp_length, CHECK);
  1.2521 +  constantPoolHandle smaller_cp(THREAD, cp);
  1.2522 +
  1.2523 +  // preserve version() value in the smaller copy
  1.2524 +  int version = scratch_cp->version();
  1.2525 +  assert(version != 0, "sanity check");
  1.2526 +  smaller_cp->set_version(version);
  1.2527 +
  1.2528 +  // attach klass to new constant pool
  1.2529 +  // reference to the cp holder is needed for copy_operands()
  1.2530 +  smaller_cp->set_pool_holder(scratch_class());
  1.2531 +
  1.2532 +  scratch_cp->copy_cp_to(1, scratch_cp_length - 1, smaller_cp, 1, THREAD);
  1.2533 +  if (HAS_PENDING_EXCEPTION) {
  1.2534 +    // Exception is handled in the caller
  1.2535 +    loader_data->add_to_deallocate_list(smaller_cp());
  1.2536 +    return;
  1.2537 +  }
  1.2538 +  scratch_cp = smaller_cp;
  1.2539 +
  1.2540 +  // attach new constant pool to klass
  1.2541 +  scratch_class->set_constants(scratch_cp());
  1.2542 +
   1.2543 +  int i;  // declared here for portability with older compilers
  1.2544 +
  1.2545 +  // update each field in klass to use new constant pool indices as needed
  1.2546 +  for (JavaFieldStream fs(scratch_class); !fs.done(); fs.next()) {
  1.2547 +    jshort cur_index = fs.name_index();
  1.2548 +    jshort new_index = find_new_index(cur_index);
  1.2549 +    if (new_index != 0) {
  1.2550 +      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2551 +        ("field-name_index change: %d to %d", cur_index, new_index));
  1.2552 +      fs.set_name_index(new_index);
  1.2553 +    }
  1.2554 +    cur_index = fs.signature_index();
  1.2555 +    new_index = find_new_index(cur_index);
  1.2556 +    if (new_index != 0) {
  1.2557 +      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2558 +        ("field-signature_index change: %d to %d", cur_index, new_index));
  1.2559 +      fs.set_signature_index(new_index);
  1.2560 +    }
  1.2561 +    cur_index = fs.initval_index();
  1.2562 +    new_index = find_new_index(cur_index);
  1.2563 +    if (new_index != 0) {
  1.2564 +      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2565 +        ("field-initval_index change: %d to %d", cur_index, new_index));
  1.2566 +      fs.set_initval_index(new_index);
  1.2567 +    }
  1.2568 +    cur_index = fs.generic_signature_index();
  1.2569 +    new_index = find_new_index(cur_index);
  1.2570 +    if (new_index != 0) {
  1.2571 +      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2572 +        ("field-generic_signature change: %d to %d", cur_index, new_index));
  1.2573 +      fs.set_generic_signature_index(new_index);
  1.2574 +    }
  1.2575 +  } // end for each field
  1.2576 +
  1.2577 +  // Update constant pool indices in the inner classes info to use
  1.2578 +  // new constant indices as needed. The inner classes info is a
  1.2579 +  // quadruple:
  1.2580 +  // (inner_class_info, outer_class_info, inner_name, inner_access_flags)
  1.2581 +  InnerClassesIterator iter(scratch_class);
  1.2582 +  for (; !iter.done(); iter.next()) {
  1.2583 +    int cur_index = iter.inner_class_info_index();
  1.2584 +    if (cur_index == 0) {
  1.2585 +      continue;  // JVM spec. allows null inner class refs so skip it
  1.2586 +    }
  1.2587 +    int new_index = find_new_index(cur_index);
  1.2588 +    if (new_index != 0) {
  1.2589 +      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2590 +        ("inner_class_info change: %d to %d", cur_index, new_index));
  1.2591 +      iter.set_inner_class_info_index(new_index);
  1.2592 +    }
  1.2593 +    cur_index = iter.outer_class_info_index();
  1.2594 +    new_index = find_new_index(cur_index);
  1.2595 +    if (new_index != 0) {
  1.2596 +      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2597 +        ("outer_class_info change: %d to %d", cur_index, new_index));
  1.2598 +      iter.set_outer_class_info_index(new_index);
  1.2599 +    }
  1.2600 +    cur_index = iter.inner_name_index();
  1.2601 +    new_index = find_new_index(cur_index);
  1.2602 +    if (new_index != 0) {
  1.2603 +      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2604 +        ("inner_name change: %d to %d", cur_index, new_index));
  1.2605 +      iter.set_inner_name_index(new_index);
  1.2606 +    }
  1.2607 +  } // end for each inner class
  1.2608 +
  1.2609 +  // Attach each method in klass to the new constant pool and update
  1.2610 +  // to use new constant pool indices as needed:
  1.2611 +  Array<Method*>* methods = scratch_class->methods();
  1.2612 +  for (i = methods->length() - 1; i >= 0; i--) {
  1.2613 +    methodHandle method(THREAD, methods->at(i));
  1.2614 +    method->set_constants(scratch_cp());
  1.2615 +
  1.2616 +    int new_index = find_new_index(method->name_index());
  1.2617 +    if (new_index != 0) {
  1.2618 +      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2619 +        ("method-name_index change: %d to %d", method->name_index(),
  1.2620 +        new_index));
  1.2621 +      method->set_name_index(new_index);
  1.2622 +    }
  1.2623 +    new_index = find_new_index(method->signature_index());
  1.2624 +    if (new_index != 0) {
  1.2625 +      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2626 +        ("method-signature_index change: %d to %d",
  1.2627 +        method->signature_index(), new_index));
  1.2628 +      method->set_signature_index(new_index);
  1.2629 +    }
  1.2630 +    new_index = find_new_index(method->generic_signature_index());
  1.2631 +    if (new_index != 0) {
  1.2632 +      RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2633 +        ("method-generic_signature_index change: %d to %d",
  1.2634 +        method->generic_signature_index(), new_index));
  1.2635 +      method->set_generic_signature_index(new_index);
  1.2636 +    }
  1.2637 +
  1.2638 +    // Update constant pool indices in the method's checked exception
  1.2639 +    // table to use new constant indices as needed.
  1.2640 +    int cext_length = method->checked_exceptions_length();
  1.2641 +    if (cext_length > 0) {
  1.2642 +      CheckedExceptionElement * cext_table =
  1.2643 +        method->checked_exceptions_start();
  1.2644 +      for (int j = 0; j < cext_length; j++) {
  1.2645 +        int cur_index = cext_table[j].class_cp_index;
  1.2646 +        int new_index = find_new_index(cur_index);
  1.2647 +        if (new_index != 0) {
  1.2648 +          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2649 +            ("cext-class_cp_index change: %d to %d", cur_index, new_index));
  1.2650 +          cext_table[j].class_cp_index = (u2)new_index;
  1.2651 +        }
  1.2652 +      } // end for each checked exception table entry
  1.2653 +    } // end if there are checked exception table entries
  1.2654 +
  1.2655 +    // Update each catch type index in the method's exception table
  1.2656 +    // to use new constant pool indices as needed. The exception table
  1.2657 +    // holds quadruple entries of the form:
  1.2658 +    //   (beg_bci, end_bci, handler_bci, klass_index)
  1.2659 +
  1.2660 +    ExceptionTable ex_table(method());
  1.2661 +    int ext_length = ex_table.length();
  1.2662 +
  1.2663 +    for (int j = 0; j < ext_length; j ++) {
  1.2664 +      int cur_index = ex_table.catch_type_index(j);
  1.2665 +      int new_index = find_new_index(cur_index);
  1.2666 +      if (new_index != 0) {
  1.2667 +        RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2668 +          ("ext-klass_index change: %d to %d", cur_index, new_index));
  1.2669 +        ex_table.set_catch_type_index(j, new_index);
  1.2670 +      }
  1.2671 +    } // end for each exception table entry
  1.2672 +
  1.2673 +    // Update constant pool indices in the method's local variable
  1.2674 +    // table to use new constant indices as needed. The local variable
   1.2675 +    // table holds sextuple entries of the form:
  1.2676 +    // (start_pc, length, name_index, descriptor_index, signature_index, slot)
  1.2677 +    int lvt_length = method->localvariable_table_length();
  1.2678 +    if (lvt_length > 0) {
  1.2679 +      LocalVariableTableElement * lv_table =
  1.2680 +        method->localvariable_table_start();
  1.2681 +      for (int j = 0; j < lvt_length; j++) {
  1.2682 +        int cur_index = lv_table[j].name_cp_index;
  1.2683 +        int new_index = find_new_index(cur_index);
  1.2684 +        if (new_index != 0) {
  1.2685 +          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2686 +            ("lvt-name_cp_index change: %d to %d", cur_index, new_index));
  1.2687 +          lv_table[j].name_cp_index = (u2)new_index;
  1.2688 +        }
  1.2689 +        cur_index = lv_table[j].descriptor_cp_index;
  1.2690 +        new_index = find_new_index(cur_index);
  1.2691 +        if (new_index != 0) {
  1.2692 +          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2693 +            ("lvt-descriptor_cp_index change: %d to %d", cur_index,
  1.2694 +            new_index));
  1.2695 +          lv_table[j].descriptor_cp_index = (u2)new_index;
  1.2696 +        }
  1.2697 +        cur_index = lv_table[j].signature_cp_index;
  1.2698 +        new_index = find_new_index(cur_index);
  1.2699 +        if (new_index != 0) {
  1.2700 +          RC_TRACE_WITH_THREAD(0x00080000, THREAD,
  1.2701 +            ("lvt-signature_cp_index change: %d to %d", cur_index, new_index));
  1.2702 +          lv_table[j].signature_cp_index = (u2)new_index;
  1.2703 +        }
  1.2704 +      } // end for each local variable table entry
  1.2705 +    } // end if there are local variable table entries
  1.2706 +
  1.2707 +    rewrite_cp_refs_in_stack_map_table(method, THREAD);
  1.2708 +  } // end for each method
  1.2709 +} // end set_new_constant_pool()
  1.2710 +
  1.2711 +
  1.2712 +// Unevolving classes may point to methods of the_class directly
  1.2713 +// from their constant pool caches, itables, and/or vtables. We
  1.2714 +// use the ClassLoaderDataGraph::classes_do() facility and this helper
  1.2715 +// to fix up these pointers.
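          +//
          +// The closure is driven from redefine_single_class() roughly as
          +// follows:
          +//
          +//   AdjustCpoolCacheAndVtable adjust_cpool_cache_and_vtable(THREAD);
          +//   ClassLoaderDataGraph::classes_do(&adjust_cpool_cache_and_vtable);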
  1.2716 +
  1.2717 +// Adjust cpools and vtables closure
  1.2718 +void VM_RedefineClasses::AdjustCpoolCacheAndVtable::do_klass(Klass* k) {
  1.2719 +
  1.2720 +  // This is a very busy routine. We don't want too much tracing
  1.2721 +  // printed out.
  1.2722 +  bool trace_name_printed = false;
  1.2723 +
  1.2724 +  // Very noisy: only enable this call if you are trying to determine
  1.2725 +  // that a specific class gets found by this routine.
  1.2726 +  // RC_TRACE macro has an embedded ResourceMark
  1.2727 +  // RC_TRACE_WITH_THREAD(0x00100000, THREAD,
  1.2728 +  //   ("adjust check: name=%s", k->external_name()));
  1.2729 +  // trace_name_printed = true;
  1.2730 +
  1.2731 +  // If the class being redefined is java.lang.Object, we need to fix all
  1.2732 +  // array class vtables also
  1.2733 +  if (k->oop_is_array() && _the_class_oop == SystemDictionary::Object_klass()) {
  1.2734 +    k->vtable()->adjust_method_entries(_matching_old_methods,
  1.2735 +                                       _matching_new_methods,
  1.2736 +                                       _matching_methods_length,
  1.2737 +                                       &trace_name_printed);
  1.2738 +  } else if (k->oop_is_instance()) {
  1.2739 +    HandleMark hm(_thread);
  1.2740 +    InstanceKlass *ik = InstanceKlass::cast(k);
  1.2741 +
  1.2742 +    // HotSpot specific optimization! HotSpot does not currently
  1.2743 +    // support delegation from the bootstrap class loader to a
  1.2744 +    // user-defined class loader. This means that if the bootstrap
  1.2745 +    // class loader is the initiating class loader, then it will also
  1.2746 +    // be the defining class loader. This also means that classes
  1.2747 +    // loaded by the bootstrap class loader cannot refer to classes
  1.2748 +    // loaded by a user-defined class loader. Note: a user-defined
  1.2749 +    // class loader can delegate to the bootstrap class loader.
  1.2750 +    //
  1.2751 +    // If the current class being redefined has a user-defined class
  1.2752 +    // loader as its defining class loader, then we can skip all
  1.2753 +    // classes loaded by the bootstrap class loader.
  1.2754 +    bool is_user_defined =
  1.2755 +           InstanceKlass::cast(_the_class_oop)->class_loader() != NULL;
  1.2756 +    if (is_user_defined && ik->class_loader() == NULL) {
  1.2757 +      return;
  1.2758 +    }
  1.2759 +
  1.2760 +    // Fix the vtable embedded in the_class and subclasses of the_class,
  1.2761 +    // if one exists. We discard scratch_class and we don't keep an
  1.2762 +    // InstanceKlass around to hold obsolete methods so we don't have
  1.2763 +    // any other InstanceKlass embedded vtables to update. The vtable
  1.2764 +    // holds the Method*s for virtual (but not final) methods.
   1.2765 +    // Default methods (i.e., concrete methods in interfaces) are
   1.2766 +    // stored in the vtable, so if an interface changes we need to call
  1.2767 +    // adjust_method_entries() for every InstanceKlass, which will also
  1.2768 +    // adjust the default method vtable indices.
  1.2769 +    // We also need to adjust any default method entries that are
  1.2770 +    // not yet in the vtable, because the vtable setup is in progress.
  1.2771 +    // This must be done after we adjust the default_methods and
  1.2772 +    // default_vtable_indices for methods already in the vtable.
  1.2773 +    if (ik->vtable_length() > 0 && (_the_class_oop->is_interface()
  1.2774 +        || ik->is_subtype_of(_the_class_oop))) {
  1.2775 +      // ik->vtable() creates a wrapper object; rm cleans it up
  1.2776 +      ResourceMark rm(_thread);
  1.2777 +      ik->vtable()->adjust_method_entries(_matching_old_methods,
  1.2778 +                                          _matching_new_methods,
  1.2779 +                                          _matching_methods_length,
  1.2780 +                                          &trace_name_printed);
  1.2781 +      ik->adjust_default_methods(_matching_old_methods,
  1.2782 +                                 _matching_new_methods,
  1.2783 +                                 _matching_methods_length,
  1.2784 +                                 &trace_name_printed);
  1.2785 +    }
  1.2786 +
  1.2787 +    // If the current class has an itable and we are either redefining an
  1.2788 +    // interface or if the current class is a subclass of the_class, then
  1.2789 +    // we potentially have to fix the itable. If we are redefining an
  1.2790 +    // interface, then we have to call adjust_method_entries() for
  1.2791 +    // every InstanceKlass that has an itable since there isn't a
  1.2792 +    // subclass relationship between an interface and an InstanceKlass.
  1.2793 +    if (ik->itable_length() > 0 && (_the_class_oop->is_interface()
  1.2794 +        || ik->is_subclass_of(_the_class_oop))) {
  1.2795 +      // ik->itable() creates a wrapper object; rm cleans it up
  1.2796 +      ResourceMark rm(_thread);
  1.2797 +      ik->itable()->adjust_method_entries(_matching_old_methods,
  1.2798 +                                          _matching_new_methods,
  1.2799 +                                          _matching_methods_length,
  1.2800 +                                          &trace_name_printed);
  1.2801 +    }
  1.2802 +
  1.2803 +    // The constant pools in other classes (other_cp) can refer to
  1.2804 +    // methods in the_class. We have to update method information in
  1.2805 +    // other_cp's cache. If other_cp has a previous version, then we
  1.2806 +    // have to repeat the process for each previous version. The
  1.2807 +    // constant pool cache holds the Method*s for non-virtual
  1.2808 +    // methods and for virtual, final methods.
  1.2809 +    //
  1.2810 +    // Special case: if the current class is the_class, then new_cp
  1.2811 +    // has already been attached to the_class and old_cp has already
  1.2812 +    // been added as a previous version. The new_cp doesn't have any
  1.2813 +    // cached references to old methods so it doesn't need to be
  1.2814 +    // updated. We can simply start with the previous version(s) in
  1.2815 +    // that case.
  1.2816 +    constantPoolHandle other_cp;
  1.2817 +    ConstantPoolCache* cp_cache;
  1.2818 +
  1.2819 +    if (ik != _the_class_oop) {
  1.2820 +      // this klass' constant pool cache may need adjustment
  1.2821 +      other_cp = constantPoolHandle(ik->constants());
  1.2822 +      cp_cache = other_cp->cache();
  1.2823 +      if (cp_cache != NULL) {
  1.2824 +        cp_cache->adjust_method_entries(_matching_old_methods,
  1.2825 +                                        _matching_new_methods,
  1.2826 +                                        _matching_methods_length,
  1.2827 +                                        &trace_name_printed);
  1.2828 +      }
  1.2829 +    }
  1.2830 +
  1.2831 +    // the previous versions' constant pool caches may need adjustment
  1.2832 +    PreviousVersionWalker pvw(_thread, ik);
  1.2833 +    for (PreviousVersionNode * pv_node = pvw.next_previous_version();
  1.2834 +         pv_node != NULL; pv_node = pvw.next_previous_version()) {
  1.2835 +      other_cp = pv_node->prev_constant_pool();
  1.2836 +      cp_cache = other_cp->cache();
  1.2837 +      if (cp_cache != NULL) {
  1.2838 +        cp_cache->adjust_method_entries(_matching_old_methods,
  1.2839 +                                        _matching_new_methods,
  1.2840 +                                        _matching_methods_length,
  1.2841 +                                        &trace_name_printed);
  1.2842 +      }
  1.2843 +    }
  1.2844 +  }
  1.2845 +}
  1.2846 +
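          +// Existing jmethodIDs held by agents must stay valid across the
          +// redefine, so rebind each old method's jmethodID to point at the
          +// corresponding new method.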
  1.2847 +void VM_RedefineClasses::update_jmethod_ids() {
  1.2848 +  for (int j = 0; j < _matching_methods_length; ++j) {
  1.2849 +    Method* old_method = _matching_old_methods[j];
  1.2850 +    jmethodID jmid = old_method->find_jmethod_id_or_null();
  1.2851 +    if (jmid != NULL) {
  1.2852 +      // There is a jmethodID, change it to point to the new method
  1.2853 +      methodHandle new_method_h(_matching_new_methods[j]);
  1.2854 +      Method::change_method_associated_with_jmethod_id(jmid, new_method_h());
  1.2855 +      assert(Method::resolve_jmethod_id(jmid) == _matching_new_methods[j],
  1.2856 +             "should be replaced");
  1.2857 +    }
  1.2858 +  }
  1.2859 +}
  1.2860 +
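          +// Mark every matching old method as old and, unless it is EMCP
          +// (equivalent modulo constant pool), as obsolete; deleted methods are
          +// always marked old and obsolete. The emcp_methods bitmap and count
          +// are filled in for the add_previous_version() call.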
   1.2861 +void VM_RedefineClasses::check_methods_and_mark_as_obsolete(
   1.2862 +       BitMap* emcp_methods, int* emcp_method_count_p) {
  1.2863 +  *emcp_method_count_p = 0;
  1.2864 +  int obsolete_count = 0;
  1.2865 +  int old_index = 0;
  1.2866 +  for (int j = 0; j < _matching_methods_length; ++j, ++old_index) {
  1.2867 +    Method* old_method = _matching_old_methods[j];
  1.2868 +    Method* new_method = _matching_new_methods[j];
  1.2869 +    Method* old_array_method;
  1.2870 +
  1.2871 +    // Maintain an old_index into the _old_methods array by skipping
  1.2872 +    // deleted methods
  1.2873 +    while ((old_array_method = _old_methods->at(old_index)) != old_method) {
  1.2874 +      ++old_index;
  1.2875 +    }
  1.2876 +
  1.2877 +    if (MethodComparator::methods_EMCP(old_method, new_method)) {
  1.2878 +      // The EMCP definition from JSR-163 requires the bytecodes to be
  1.2879 +      // the same with the exception of constant pool indices which may
  1.2880 +      // differ. However, the constants referred to by those indices
  1.2881 +      // must be the same.
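          +      // For example (illustrative only): old bytecode "ldc #5" and
          +      // new bytecode "ldc #3" are still EMCP if old entry 5 and new
          +      // entry 3 resolve to the same constant.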
  1.2882 +      //
  1.2883 +      // We use methods_EMCP() for comparison since constant pool
  1.2884 +      // merging can remove duplicate constant pool entries that were
  1.2885 +      // present in the old method and removed from the rewritten new
  1.2886 +      // method. A faster binary comparison function would consider the
  1.2887 +      // old and new methods to be different when they are actually
  1.2888 +      // EMCP.
  1.2889 +      //
  1.2890 +      // The old and new methods are EMCP and you would think that we
  1.2891 +      // could get rid of one of them here and now and save some space.
  1.2892 +      // However, the concept of EMCP only considers the bytecodes and
  1.2893 +      // the constant pool entries in the comparison. Other things,
  1.2894 +      // e.g., the line number table (LNT) or the local variable table
  1.2895 +      // (LVT) don't count in the comparison. So the new (and EMCP)
   1.2896 +      // method can have a new LNT that we need, so we can't just
  1.2897 +      // overwrite the new method with the old method.
  1.2898 +      //
  1.2899 +      // When this routine is called, we have already attached the new
  1.2900 +      // methods to the_class so the old methods are effectively
  1.2901 +      // overwritten. However, if an old method is still executing,
  1.2902 +      // then the old method cannot be collected until sometime after
  1.2903 +      // the old method call has returned. So the overwriting of old
  1.2904 +      // methods by new methods will save us space except for those
  1.2905 +      // (hopefully few) old methods that are still executing.
  1.2906 +      //
  1.2907 +      // A method refers to a ConstMethod* and this presents another
  1.2908 +      // possible avenue to space savings. The ConstMethod* in the
  1.2909 +      // new method contains possibly new attributes (LNT, LVT, etc).
  1.2910 +      // At first glance, it seems possible to save space by replacing
  1.2911 +      // the ConstMethod* in the old method with the ConstMethod*
  1.2912 +      // from the new method. The old and new methods would share the
  1.2913 +      // same ConstMethod* and we would save the space occupied by
  1.2914 +      // the old ConstMethod*. However, the ConstMethod* contains
  1.2915 +      // a back reference to the containing method. Sharing the
  1.2916 +      // ConstMethod* between two methods could lead to confusion in
  1.2917 +      // the code that uses the back reference. This would lead to
  1.2918 +      // brittle code that could be broken in non-obvious ways now or
  1.2919 +      // in the future.
  1.2920 +      //
  1.2921 +      // Another possibility is to copy the ConstMethod* from the new
  1.2922 +      // method to the old method and then overwrite the new method with
  1.2923 +      // the old method. Since the ConstMethod* contains the bytecodes
  1.2924 +      // for the method embedded in the oop, this option would change
  1.2925 +      // the bytecodes out from under any threads executing the old
  1.2926 +      // method and make the thread's bcp invalid. Since EMCP requires
  1.2927 +      // that the bytecodes be the same modulo constant pool indices, it
   1.2928 +      // is straightforward to compute the correct new bcp in the new
  1.2929 +      // ConstMethod* from the old bcp in the old ConstMethod*. The
  1.2930 +      // time consuming part would be searching all the frames in all
  1.2931 +      // of the threads to find all of the calls to the old method.
  1.2932 +      //
  1.2933 +      // It looks like we will have to live with the limited savings
  1.2934 +      // that we get from effectively overwriting the old methods
  1.2935 +      // when the new methods are attached to the_class.
  1.2936 +
  1.2937 +      // track which methods are EMCP for add_previous_version() call
  1.2938 +      emcp_methods->set_bit(old_index);
  1.2939 +      (*emcp_method_count_p)++;
  1.2940 +
  1.2941 +      // An EMCP method is _not_ obsolete. An obsolete method has a
  1.2942 +      // different jmethodID than the current method. An EMCP method
  1.2943 +      // has the same jmethodID as the current method. Having the
  1.2944 +      // same jmethodID for all EMCP versions of a method allows for
  1.2945 +      // a consistent view of the EMCP methods regardless of which
  1.2946 +      // EMCP method you happen to have in hand. For example, a
  1.2947 +      // breakpoint set in one EMCP method will work for all EMCP
  1.2948 +      // versions of the method including the current one.
  1.2949 +    } else {
  1.2950 +      // mark obsolete methods as such
  1.2951 +      old_method->set_is_obsolete();
  1.2952 +      obsolete_count++;
  1.2953 +
  1.2954 +      // obsolete methods need a unique idnum so they become new entries in
  1.2955 +      // the jmethodID cache in InstanceKlass
  1.2956 +      u2 num = InstanceKlass::cast(_the_class_oop)->next_method_idnum();
  1.2957 +      if (num != ConstMethod::UNSET_IDNUM) {
  1.2958 +        old_method->set_method_idnum(num);
  1.2959 +      }
  1.2960 +
  1.2961 +      // With tracing we try not to "yack" too much. The position of
  1.2962 +      // this trace assumes there are fewer obsolete methods than
  1.2963 +      // EMCP methods.
  1.2964 +      RC_TRACE(0x00000100, ("mark %s(%s) as obsolete",
  1.2965 +        old_method->name()->as_C_string(),
  1.2966 +        old_method->signature()->as_C_string()));
  1.2967 +    }
  1.2968 +    old_method->set_is_old();
  1.2969 +  }
  1.2970 +  for (int i = 0; i < _deleted_methods_length; ++i) {
  1.2971 +    Method* old_method = _deleted_methods[i];
  1.2972 +
  1.2973 +    assert(!old_method->has_vtable_index(),
  1.2974 +           "cannot delete methods with vtable entries");;
  1.2975 +
  1.2976 +    // Mark all deleted methods as old and obsolete
  1.2977 +    old_method->set_is_old();
  1.2978 +    old_method->set_is_obsolete();
  1.2979 +    ++obsolete_count;
  1.2980 +    // With tracing we try not to "yack" too much. The position of
  1.2981 +    // this trace assumes there are fewer obsolete methods than
  1.2982 +    // EMCP methods.
  1.2983 +    RC_TRACE(0x00000100, ("mark deleted %s(%s) as obsolete",
  1.2984 +                          old_method->name()->as_C_string(),
  1.2985 +                          old_method->signature()->as_C_string()));
  1.2986 +  }
  1.2987 +  assert((*emcp_method_count_p + obsolete_count) == _old_methods->length(),
  1.2988 +    "sanity check");
  1.2989 +  RC_TRACE(0x00000100, ("EMCP_cnt=%d, obsolete_cnt=%d", *emcp_method_count_p,
  1.2990 +    obsolete_count));
  1.2991 +}
  1.2992 +
  1.2993 +// This internal class transfers the native function registration from old methods
  1.2994 +// to new methods.  It is designed to handle both the simple case of unchanged
  1.2995 +// native methods and the complex cases of native method prefixes being added and/or
  1.2996 +// removed.
  1.2997 +// It expects only to be used during the VM_RedefineClasses op (a safepoint).
  1.2998 +//
  1.2999 +// This class is used after the new methods have been installed in "the_class".
  1.3000 +//
  1.3001 +// So, for example, the following must be handled.  Where 'm' is a method and
  1.3002 +// a number followed by an underscore is a prefix.
  1.3003 +//
  1.3004 +//                                      Old Name    New Name
  1.3005 +// Simple transfer to new method        m       ->  m
  1.3006 +// Add prefix                           m       ->  1_m
  1.3007 +// Remove prefix                        1_m     ->  m
  1.3008 +// Simultaneous add of prefixes         m       ->  3_2_1_m
  1.3009 +// Simultaneous removal of prefixes     3_2_1_m ->  m
  1.3010 +// Simultaneous add and remove          1_m     ->  2_m
  1.3011 +// Same, caused by prefix removal only  3_2_1_m ->  3_2_m
  1.3012 +//
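          +// For example (illustrative only), with prefixes {"1_", "2_"} and the
          +// stripped name "m", search_prefix_name_space() probes "m", "2_m",
          +// "1_m" and "2_1_m", descending a branch only while a non-native
          +// intermediate method of that name still exists.
          +//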
  1.3013 +class TransferNativeFunctionRegistration {
  1.3014 + private:
  1.3015 +  instanceKlassHandle the_class;
  1.3016 +  int prefix_count;
  1.3017 +  char** prefixes;
  1.3018 +
  1.3019 +  // Recursively search the binary tree of possibly prefixed method names.
  1.3020 +  // Iteration could be used if all agents were well behaved. Full tree walk is
   1.3021 +  // more resilient to agents not cleaning up intermediate methods.
  1.3022 +  // Branch at each depth in the binary tree is:
  1.3023 +  //    (1) without the prefix.
  1.3024 +  //    (2) with the prefix.
  1.3025 +  // where 'prefix' is the prefix at that 'depth' (first prefix, second prefix,...)
  1.3026 +  Method* search_prefix_name_space(int depth, char* name_str, size_t name_len,
  1.3027 +                                     Symbol* signature) {
  1.3028 +    TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len);
  1.3029 +    if (name_symbol != NULL) {
  1.3030 +      Method* method = the_class()->lookup_method(name_symbol, signature);
  1.3031 +      if (method != NULL) {
  1.3032 +        // Even if prefixed, intermediate methods must exist.
  1.3033 +        if (method->is_native()) {
  1.3034 +          // Wahoo, we found a (possibly prefixed) version of the method, return it.
  1.3035 +          return method;
  1.3036 +        }
  1.3037 +        if (depth < prefix_count) {
  1.3038 +          // Try applying further prefixes (other than this one).
  1.3039 +          method = search_prefix_name_space(depth+1, name_str, name_len, signature);
  1.3040 +          if (method != NULL) {
  1.3041 +            return method; // found
  1.3042 +          }
  1.3043 +
  1.3044 +          // Try adding this prefix to the method name and see if it matches
  1.3045 +          // another method name.
  1.3046 +          char* prefix = prefixes[depth];
  1.3047 +          size_t prefix_len = strlen(prefix);
  1.3048 +          size_t trial_len = name_len + prefix_len;
  1.3049 +          char* trial_name_str = NEW_RESOURCE_ARRAY(char, trial_len + 1);
  1.3050 +          strcpy(trial_name_str, prefix);
  1.3051 +          strcat(trial_name_str, name_str);
  1.3052 +          method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
  1.3053 +                                            signature);
  1.3054 +          if (method != NULL) {
  1.3055 +            // If found along this branch, it was prefixed, mark as such
  1.3056 +            method->set_is_prefixed_native();
  1.3057 +            return method; // found
  1.3058 +          }
  1.3059 +        }
  1.3060 +      }
  1.3061 +    }
  1.3062 +    return NULL;  // This whole branch bore nothing
  1.3063 +  }
  1.3064 +
  1.3065 +  // Return the method name with old prefixes stripped away.
  1.3066 +  char* method_name_without_prefixes(Method* method) {
  1.3067 +    Symbol* name = method->name();
  1.3068 +    char* name_str = name->as_utf8();
  1.3069 +
   1.3070 +    // Old prefixing may be defunct; strip any prefixes.
  1.3071 +    for (int i = prefix_count-1; i >= 0; i--) {
  1.3072 +      char* prefix = prefixes[i];
  1.3073 +      size_t prefix_len = strlen(prefix);
  1.3074 +      if (strncmp(prefix, name_str, prefix_len) == 0) {
  1.3075 +        name_str += prefix_len;
  1.3076 +      }
  1.3077 +    }
  1.3078 +    return name_str;
  1.3079 +  }
  1.3080 +
  1.3081 +  // Strip any prefixes off the old native method, then try to find a
  1.3082 +  // (possibly prefixed) new native that matches it.
  1.3083 +  Method* strip_and_search_for_new_native(Method* method) {
  1.3084 +    ResourceMark rm;
  1.3085 +    char* name_str = method_name_without_prefixes(method);
  1.3086 +    return search_prefix_name_space(0, name_str, strlen(name_str),
  1.3087 +                                    method->signature());
  1.3088 +  }
  1.3089 +
  1.3090 + public:
  1.3091 +
  1.3092 +  // Construct a native method transfer processor for this class.
  1.3093 +  TransferNativeFunctionRegistration(instanceKlassHandle _the_class) {
  1.3094 +    assert(SafepointSynchronize::is_at_safepoint(), "sanity check");
  1.3095 +
  1.3096 +    the_class = _the_class;
  1.3097 +    prefixes = JvmtiExport::get_all_native_method_prefixes(&prefix_count);
  1.3098 +  }
  1.3099 +
  1.3100 +  // Attempt to transfer any of the old or deleted methods that are native
  1.3101 +  void transfer_registrations(Method** old_methods, int methods_length) {
  1.3102 +    for (int j = 0; j < methods_length; j++) {
  1.3103 +      Method* old_method = old_methods[j];
  1.3104 +
  1.3105 +      if (old_method->is_native() && old_method->has_native_function()) {
  1.3106 +        Method* new_method = strip_and_search_for_new_native(old_method);
  1.3107 +        if (new_method != NULL) {
  1.3108 +          // Actually set the native function in the new method.
  1.3109 +          // Redefine does not send events (except CFLH), certainly not this
   1.3110 +          // behind-the-scenes re-registration.
  1.3111 +          new_method->set_native_function(old_method->native_function(),
  1.3112 +                              !Method::native_bind_event_is_interesting);
  1.3113 +        }
  1.3114 +      }
  1.3115 +    }
  1.3116 +  }
  1.3117 +};
  1.3118 +
  1.3119 +// Don't lose the association between a native method and its JNI function.
  1.3120 +void VM_RedefineClasses::transfer_old_native_function_registrations(instanceKlassHandle the_class) {
  1.3121 +  TransferNativeFunctionRegistration transfer(the_class);
  1.3122 +  transfer.transfer_registrations(_deleted_methods, _deleted_methods_length);
  1.3123 +  transfer.transfer_registrations(_matching_old_methods, _matching_methods_length);
  1.3124 +}
  1.3125 +
  1.3126 +// Deoptimize all compiled code that depends on this class.
  1.3127 +//
  1.3128 +// If the can_redefine_classes capability is obtained in the onload
  1.3129 +// phase then the compiler has recorded all dependencies from startup.
  1.3130 +// In that case we need only deoptimize and throw away all compiled code
  1.3131 +// that depends on the class.
  1.3132 +//
  1.3133 +// If can_redefine_classes is obtained sometime after the onload
  1.3134 +// phase then the dependency information may be incomplete. In that case
  1.3135 +// the first call to RedefineClasses causes all compiled code to be
  1.3136 +// thrown away. As can_redefine_classes has been obtained then
  1.3137 +// all future compilations will record dependencies so second and
  1.3138 +// subsequent calls to RedefineClasses need only throw away code
  1.3139 +// that depends on the class.
  1.3140 +//
  1.3141 +void VM_RedefineClasses::flush_dependent_code(instanceKlassHandle k_h, TRAPS) {
  1.3142 +  assert_locked_or_safepoint(Compile_lock);
  1.3143 +
  1.3144 +  // All dependencies have been recorded from startup or this is a second or
  1.3145 +  // subsequent use of RedefineClasses
  1.3146 +  if (JvmtiExport::all_dependencies_are_recorded()) {
  1.3147 +    Universe::flush_evol_dependents_on(k_h);
  1.3148 +  } else {
  1.3149 +    CodeCache::mark_all_nmethods_for_deoptimization();
  1.3150 +
  1.3151 +    ResourceMark rm(THREAD);
  1.3152 +    DeoptimizationMarker dm;
  1.3153 +
  1.3154 +    // Deoptimize all activations depending on marked nmethods
  1.3155 +    Deoptimization::deoptimize_dependents();
  1.3156 +
  1.3157 +    // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies)
  1.3158 +    CodeCache::make_marked_nmethods_not_entrant();
  1.3159 +
  1.3160 +    // From now on we know that the dependency information is complete
  1.3161 +    JvmtiExport::set_all_dependencies_are_recorded(true);
  1.3162 +  }
  1.3163 +}
  1.3164 +
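          +// Both methods arrays are sorted by method name (with added
          +// overloads moved to the end), so a single parallel pass can
          +// partition the methods into the matching, added and deleted sets
          +// recorded in the static fields above.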
  1.3165 +void VM_RedefineClasses::compute_added_deleted_matching_methods() {
  1.3166 +  Method* old_method;
  1.3167 +  Method* new_method;
  1.3168 +
  1.3169 +  _matching_old_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  1.3170 +  _matching_new_methods = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  1.3171 +  _added_methods        = NEW_RESOURCE_ARRAY(Method*, _new_methods->length());
  1.3172 +  _deleted_methods      = NEW_RESOURCE_ARRAY(Method*, _old_methods->length());
  1.3173 +
  1.3174 +  _matching_methods_length = 0;
  1.3175 +  _deleted_methods_length  = 0;
  1.3176 +  _added_methods_length    = 0;
  1.3177 +
  1.3178 +  int nj = 0;
  1.3179 +  int oj = 0;
  1.3180 +  while (true) {
  1.3181 +    if (oj >= _old_methods->length()) {
  1.3182 +      if (nj >= _new_methods->length()) {
  1.3183 +        break; // we've looked at everything, done
  1.3184 +      }
  1.3185 +      // New method at the end
  1.3186 +      new_method = _new_methods->at(nj);
  1.3187 +      _added_methods[_added_methods_length++] = new_method;
  1.3188 +      ++nj;
  1.3189 +    } else if (nj >= _new_methods->length()) {
  1.3190 +      // Old method, at the end, is deleted
  1.3191 +      old_method = _old_methods->at(oj);
  1.3192 +      _deleted_methods[_deleted_methods_length++] = old_method;
  1.3193 +      ++oj;
  1.3194 +    } else {
  1.3195 +      old_method = _old_methods->at(oj);
  1.3196 +      new_method = _new_methods->at(nj);
  1.3197 +      if (old_method->name() == new_method->name()) {
  1.3198 +        if (old_method->signature() == new_method->signature()) {
  1.3199 +          _matching_old_methods[_matching_methods_length  ] = old_method;
  1.3200 +          _matching_new_methods[_matching_methods_length++] = new_method;
  1.3201 +          ++nj;
  1.3202 +          ++oj;
  1.3203 +        } else {
   1.3204 +          // added overloaded methods have already been moved to the end,
  1.3205 +          // so this is a deleted overloaded method
  1.3206 +          _deleted_methods[_deleted_methods_length++] = old_method;
  1.3207 +          ++oj;
  1.3208 +        }
  1.3209 +      } else { // names don't match
  1.3210 +        if (old_method->name()->fast_compare(new_method->name()) > 0) {
  1.3211 +          // new method
  1.3212 +          _added_methods[_added_methods_length++] = new_method;
  1.3213 +          ++nj;
  1.3214 +        } else {
  1.3215 +          // deleted method
  1.3216 +          _deleted_methods[_deleted_methods_length++] = old_method;
  1.3217 +          ++oj;
  1.3218 +        }
  1.3219 +      }
  1.3220 +    }
  1.3221 +  }
  1.3222 +  assert(_matching_methods_length + _deleted_methods_length == _old_methods->length(), "sanity");
  1.3223 +  assert(_matching_methods_length + _added_methods_length == _new_methods->length(), "sanity");
  1.3224 +}
  1.3225 +
  1.3226 +
  1.3227 +void VM_RedefineClasses::swap_annotations(instanceKlassHandle the_class,
  1.3228 +                                          instanceKlassHandle scratch_class) {
  1.3229 +  // Since there is currently no rewriting of type annotations indexes
  1.3230 +  // into the CP, we null out type annotations on scratch_class before
  1.3231 +  // we swap annotations with the_class rather than facing the
  1.3232 +  // possibility of shipping annotations with broken indexes to
  1.3233 +  // Java-land.
  1.3234 +  ClassLoaderData* loader_data = scratch_class->class_loader_data();
  1.3235 +  AnnotationArray* new_class_type_annotations = scratch_class->class_type_annotations();
  1.3236 +  if (new_class_type_annotations != NULL) {
  1.3237 +    MetadataFactory::free_array<u1>(loader_data, new_class_type_annotations);
  1.3238 +    scratch_class->annotations()->set_class_type_annotations(NULL);
  1.3239 +  }
  1.3240 +  Array<AnnotationArray*>* new_field_type_annotations = scratch_class->fields_type_annotations();
  1.3241 +  if (new_field_type_annotations != NULL) {
  1.3242 +    Annotations::free_contents(loader_data, new_field_type_annotations);
  1.3243 +    scratch_class->annotations()->set_fields_type_annotations(NULL);
  1.3244 +  }
  1.3245 +
  1.3246 +  // Swap annotation fields values
  1.3247 +  Annotations* old_annotations = the_class->annotations();
  1.3248 +  the_class->set_annotations(scratch_class->annotations());
  1.3249 +  scratch_class->set_annotations(old_annotations);
  1.3250 +}
  1.3251 +
  1.3252 +
  1.3253 +// Install the redefinition of a class:
   1.3254 +//    - housekeeping (flushing breakpoints and caches, deoptimizing
  1.3255 +//      dependent compiled code)
  1.3256 +//    - replacing parts in the_class with parts from scratch_class
  1.3257 +//    - adding a weak reference to track the obsolete but interesting
  1.3258 +//      parts of the_class
  1.3259 +//    - adjusting constant pool caches and vtables in other classes
  1.3260 +//      that refer to methods in the_class. These adjustments use the
  1.3261 +//      ClassLoaderDataGraph::classes_do() facility which only allows
  1.3262 +//      a helper method to be specified. The interesting parameters
  1.3263 +//      that we would like to pass to the helper method are saved in
  1.3264 +//      static global fields in the VM operation.
  1.3265 +void VM_RedefineClasses::redefine_single_class(jclass the_jclass,
  1.3266 +       Klass* scratch_class_oop, TRAPS) {
  1.3267 +
  1.3268 +  HandleMark hm(THREAD);   // make sure handles from this call are freed
  1.3269 +  RC_TIMER_START(_timer_rsc_phase1);
  1.3270 +
  1.3271 +  instanceKlassHandle scratch_class(scratch_class_oop);
  1.3272 +
  1.3273 +  oop the_class_mirror = JNIHandles::resolve_non_null(the_jclass);
  1.3274 +  Klass* the_class_oop = java_lang_Class::as_Klass(the_class_mirror);
  1.3275 +  instanceKlassHandle the_class = instanceKlassHandle(THREAD, the_class_oop);
  1.3276 +
  1.3277 +  // Remove all breakpoints in methods of this class
  1.3278 +  JvmtiBreakpoints& jvmti_breakpoints = JvmtiCurrentBreakpoints::get_jvmti_breakpoints();
  1.3279 +  jvmti_breakpoints.clearall_in_class_at_safepoint(the_class_oop);
  1.3280 +
  1.3281 +  // Deoptimize all compiled code that depends on this class
  1.3282 +  flush_dependent_code(the_class, THREAD);
  1.3283 +
  1.3284 +  _old_methods = the_class->methods();
  1.3285 +  _new_methods = scratch_class->methods();
  1.3286 +  _the_class_oop = the_class_oop;
  1.3287 +  compute_added_deleted_matching_methods();
  1.3288 +  update_jmethod_ids();
  1.3289 +
  1.3290 +  // Attach new constant pool to the original klass. The original
  1.3291 +  // klass still refers to the old constant pool (for now).
  1.3292 +  scratch_class->constants()->set_pool_holder(the_class());
  1.3293 +
  1.3294 +#if 0
  1.3295 +  // In theory, with constant pool merging in place we should be able
  1.3296 +  // to save space by using the new, merged constant pool in place of
  1.3297 +  // the old constant pool(s). By "pool(s)" I mean the constant pool in
  1.3298 +  // the klass version we are replacing now and any constant pool(s) in
  1.3299 +  // previous versions of klass. Nice theory, doesn't work in practice.
  1.3300 +  // When this code is enabled, even simple programs throw NullPointer
  1.3301 +  // exceptions. I'm guessing that this is caused by some constant pool
  1.3302 +  // cache difference between the new, merged constant pool and the
  1.3303 +  // constant pool that was just being used by the klass. I'm keeping
  1.3304 +  // this code around to archive the idea, but the code has to remain
  1.3305 +  // disabled for now.
  1.3306 +
  1.3307 +  // Attach each old method to the new constant pool. This can be
  1.3308 +  // done here since we are past the bytecode verification and
  1.3309 +  // constant pool optimization phases.
  1.3310 +  for (int i = _old_methods->length() - 1; i >= 0; i--) {
  1.3311 +    Method* method = _old_methods->at(i);
  1.3312 +    method->set_constants(scratch_class->constants());
  1.3313 +  }
  1.3314 +
  1.3315 +  {
  1.3316 +    // walk all previous versions of the klass
  1.3317 +    InstanceKlass *ik = (InstanceKlass *)the_class();
  1.3318 +    PreviousVersionWalker pvw(ik);
  1.3319 +    instanceKlassHandle ikh;
  1.3320 +    do {
  1.3321 +      ikh = pvw.next_previous_version();
  1.3322 +      if (!ikh.is_null()) {
  1.3323 +        ik = ikh();
  1.3324 +
  1.3325 +        // attach previous version of klass to the new constant pool
  1.3326 +        ik->set_constants(scratch_class->constants());
  1.3327 +
  1.3328 +        // Attach each method in the previous version of klass to the
  1.3329 +        // new constant pool
  1.3330 +        Array<Method*>* prev_methods = ik->methods();
  1.3331 +        for (int i = prev_methods->length() - 1; i >= 0; i--) {
  1.3332 +          Method* method = prev_methods->at(i);
  1.3333 +          method->set_constants(scratch_class->constants());
  1.3334 +        }
  1.3335 +      }
  1.3336 +    } while (!ikh.is_null());
  1.3337 +  }
  1.3338 +#endif
  1.3339 +
  1.3340 +  // Replace methods and constantpool
  1.3341 +  the_class->set_methods(_new_methods);
  1.3342 +  scratch_class->set_methods(_old_methods);     // To prevent potential GCing of the old methods,
   1.3343 +                                                // and to be able to undo the operation easily.
  1.3344 +
  1.3345 +  ConstantPool* old_constants = the_class->constants();
  1.3346 +  the_class->set_constants(scratch_class->constants());
  1.3347 +  scratch_class->set_constants(old_constants);  // See the previous comment.
  1.3348 +#if 0
  1.3349 +  // We are swapping the guts of "the new class" with the guts of "the
  1.3350 +  // class". Since the old constant pool has just been attached to "the
  1.3351 +  // new class", it seems logical to set the pool holder in the old
  1.3352 +  // constant pool also. However, doing this will change the observable
  1.3353 +  // class hierarchy for any old methods that are still executing. A
  1.3354 +  // method can query the identity of its "holder" and this query uses
  1.3355 +  // the method's constant pool link to find the holder. The change in
  1.3356 +  // holding class from "the class" to "the new class" can confuse
  1.3357 +  // things.
  1.3358 +  //
  1.3359 +  // Setting the old constant pool's holder will also cause
  1.3360 +  // verification done during vtable initialization below to fail.
  1.3361 +  // During vtable initialization, the vtable's class is verified to be
  1.3362 +  // a subtype of the method's holder. The vtable's class is "the
  1.3363 +  // class" and the method's holder is gotten from the constant pool
  1.3364 +  // link in the method itself. For "the class"'s directly implemented
  1.3365 +  // methods, the method holder is "the class" itself (as gotten from
  1.3366 +  // the new constant pool). The check works fine in this case. The
  1.3367 +  // check also works fine for methods inherited from super classes.
  1.3368 +  //
  1.3369 +  // Miranda methods are a little more complicated. A miranda method is
  1.3370 +  // provided by an interface when the class implementing the interface
  1.3371 +  // does not provide its own method.  These interfaces are implemented
  1.3372 +  // internally as an InstanceKlass. These special instanceKlasses
  1.3373 +  // share the constant pool of the class that "implements" the
  1.3374 +  // interface. By sharing the constant pool, the method holder of a
  1.3375 +  // miranda method is the class that "implements" the interface. In a
  1.3376 +  // non-redefine situation, the subtype check works fine. However, if
  1.3377 +  // the old constant pool's pool holder is modified, then the check
  1.3378 +  // fails because there is no class hierarchy relationship between the
  1.3379 +  // vtable's class and "the new class".
  1.3380 +
  1.3381 +  old_constants->set_pool_holder(scratch_class());
  1.3382 +#endif
  1.3383 +
  1.3384 +  // track which methods are EMCP for add_previous_version() call below
  1.3385 +  BitMap emcp_methods(_old_methods->length());
  1.3386 +  int emcp_method_count = 0;
  1.3387 +  emcp_methods.clear();  // clears 0..(length() - 1)
  1.3388 +  check_methods_and_mark_as_obsolete(&emcp_methods, &emcp_method_count);
  1.3389 +  transfer_old_native_function_registrations(the_class);
  1.3390 +
  1.3391 +  // The class file bytes from before any retransformable agents mucked
   1.3392 +  // with them were cached on the scratch class; move them to the_class.
  1.3393 +  // Note: we still want to do this if nothing needed caching since it
  1.3394 +  // should get cleared in the_class too.
   1.3395 +  if (the_class->get_cached_class_file_bytes() == NULL) {
  1.3396 +    // the_class doesn't have a cache yet so copy it
  1.3397 +    the_class->set_cached_class_file(scratch_class->get_cached_class_file());
  1.3398 +  }
  1.3399 +#ifndef PRODUCT
  1.3400 +  else {
  1.3401 +    assert(the_class->get_cached_class_file_bytes() ==
  1.3402 +      scratch_class->get_cached_class_file_bytes(), "cache ptrs must match");
  1.3403 +    assert(the_class->get_cached_class_file_len() ==
  1.3404 +      scratch_class->get_cached_class_file_len(), "cache lens must match");
  1.3405 +  }
  1.3406 +#endif
  1.3407 +
   1.3408 +  // NULL out the cached bytes in the scratch class so they are not
   1.3409 +  // deleted twice.  The class to be redefined always owns these bytes.
  1.3410 +  scratch_class->set_cached_class_file(NULL);
  1.3411 +
  1.3412 +  // Replace inner_classes
  1.3413 +  Array<u2>* old_inner_classes = the_class->inner_classes();
  1.3414 +  the_class->set_inner_classes(scratch_class->inner_classes());
  1.3415 +  scratch_class->set_inner_classes(old_inner_classes);
  1.3416 +
  1.3417 +  // Initialize the vtable and interface table after
  1.3418 +  // methods have been rewritten
  1.3419 +  {
  1.3420 +    ResourceMark rm(THREAD);
  1.3421 +    // no exception should happen here since we explicitly
  1.3422 +    // do not check loader constraints.
  1.3423 +    // compare_and_normalize_class_versions has already checked:
  1.3424 +    //  - classloaders unchanged, signatures unchanged
  1.3425 +    //  - all instanceKlasses for redefined classes reused & contents updated
  1.3426 +    the_class->vtable()->initialize_vtable(false, THREAD);
  1.3427 +    the_class->itable()->initialize_itable(false, THREAD);
  1.3428 +    assert(!HAS_PENDING_EXCEPTION || (THREAD->pending_exception()->is_a(SystemDictionary::ThreadDeath_klass())), "redefine exception");
  1.3429 +  }
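  // Illustrative sketch, not part of the original changeset: the assert above
  // uses HotSpot's pending-exception idiom; a minimal TRAPS-style function
  // built on the same macros looks like (hypothetical, for illustration only):
  //
  //   void example(TRAPS) {
  //     do_work(CHECK);                // return immediately if an exception is set
  //     if (HAS_PENDING_EXCEPTION) {   // or test and handle it explicitly
  //       CLEAR_PENDING_EXCEPTION;
  //     }
  //   }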
  1.3430 +
  1.3431 +  // Leave arrays of jmethodIDs and itable index cache unchanged
  1.3432 +
  1.3433 +  // Copy the "source file name" attribute from new class version
  1.3434 +  the_class->set_source_file_name_index(
  1.3435 +    scratch_class->source_file_name_index());
  1.3436 +
  1.3437 +  // Copy the "source debug extension" attribute from new class version
  1.3438 +  the_class->set_source_debug_extension(
  1.3439 +    scratch_class->source_debug_extension(),
  1.3440 +    scratch_class->source_debug_extension() == NULL ? 0 :
  1.3441 +    (int)strlen(scratch_class->source_debug_extension()));
  1.3442 +
   1.3443 +  // The use of javac -g could differ between the old and the new class versions
  1.3444 +  if (scratch_class->access_flags().has_localvariable_table() !=
  1.3445 +      the_class->access_flags().has_localvariable_table()) {
  1.3446 +
  1.3447 +    AccessFlags flags = the_class->access_flags();
  1.3448 +    if (scratch_class->access_flags().has_localvariable_table()) {
  1.3449 +      flags.set_has_localvariable_table();
  1.3450 +    } else {
  1.3451 +      flags.clear_has_localvariable_table();
  1.3452 +    }
  1.3453 +    the_class->set_access_flags(flags);
  1.3454 +  }
  1.3455 +
  1.3456 +  swap_annotations(the_class, scratch_class);
  1.3457 +
  1.3458 +  // Replace minor version number of class file
  1.3459 +  u2 old_minor_version = the_class->minor_version();
  1.3460 +  the_class->set_minor_version(scratch_class->minor_version());
  1.3461 +  scratch_class->set_minor_version(old_minor_version);
  1.3462 +
  1.3463 +  // Replace major version number of class file
  1.3464 +  u2 old_major_version = the_class->major_version();
  1.3465 +  the_class->set_major_version(scratch_class->major_version());
  1.3466 +  scratch_class->set_major_version(old_major_version);
  1.3467 +
  1.3468 +  // Replace CP indexes for class and name+type of enclosing method
  1.3469 +  u2 old_class_idx  = the_class->enclosing_method_class_index();
  1.3470 +  u2 old_method_idx = the_class->enclosing_method_method_index();
  1.3471 +  the_class->set_enclosing_method_indices(
  1.3472 +    scratch_class->enclosing_method_class_index(),
  1.3473 +    scratch_class->enclosing_method_method_index());
  1.3474 +  scratch_class->set_enclosing_method_indices(old_class_idx, old_method_idx);
  1.3475 +
  1.3476 +  // keep track of previous versions of this class
  1.3477 +  the_class->add_previous_version(scratch_class, &emcp_methods,
  1.3478 +    emcp_method_count);
  1.3479 +
  1.3480 +  RC_TIMER_STOP(_timer_rsc_phase1);
  1.3481 +  RC_TIMER_START(_timer_rsc_phase2);
  1.3482 +
  1.3483 +  // Adjust constantpool caches and vtables for all classes
  1.3484 +  // that reference methods of the evolved class.
  1.3485 +  AdjustCpoolCacheAndVtable adjust_cpool_cache_and_vtable(THREAD);
  1.3486 +  ClassLoaderDataGraph::classes_do(&adjust_cpool_cache_and_vtable);
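  // Illustrative sketch, not part of the original changeset: classes_do()
  // applies a KlassClosure to every loaded class; a minimal closure following
  // the same pattern as AdjustCpoolCacheAndVtable (hypothetical, for
  // illustration only):
  //
  //   class CountKlasses : public KlassClosure {
  //     int _count;
  //    public:
  //     CountKlasses() : _count(0) {}
  //     void do_klass(Klass* k) { _count++; }
  //     int count() const { return _count; }
  //   };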
  1.3487 +
  1.3488 +  // JSR-292 support
  1.3489 +  MemberNameTable* mnt = the_class->member_names();
  1.3490 +  if (mnt != NULL) {
  1.3491 +    bool trace_name_printed = false;
  1.3492 +    mnt->adjust_method_entries(_matching_old_methods,
  1.3493 +                               _matching_new_methods,
  1.3494 +                               _matching_methods_length,
  1.3495 +                               &trace_name_printed);
  1.3496 +  }
  1.3497 +
   1.3498 +  // Also fix the resolution error table so it no longer references the old constant pool
  1.3499 +  SystemDictionary::delete_resolution_error(old_constants);
  1.3500 +
  1.3501 +  if (the_class->oop_map_cache() != NULL) {
  1.3502 +    // Flush references to any obsolete methods from the oop map cache
  1.3503 +    // so that obsolete methods are not pinned.
  1.3504 +    the_class->oop_map_cache()->flush_obsolete_entries();
  1.3505 +  }
  1.3506 +
  1.3507 +  // increment the classRedefinedCount field in the_class and in any
  1.3508 +  // direct and indirect subclasses of the_class
  1.3509 +  increment_class_counter((InstanceKlass *)the_class(), THREAD);
  1.3510 +
  1.3511 +  // RC_TRACE macro has an embedded ResourceMark
  1.3512 +  RC_TRACE_WITH_THREAD(0x00000001, THREAD,
  1.3513 +    ("redefined name=%s, count=%d (avail_mem=" UINT64_FORMAT "K)",
  1.3514 +    the_class->external_name(),
  1.3515 +    java_lang_Class::classRedefinedCount(the_class_mirror),
  1.3516 +    os::available_memory() >> 10));
  1.3517 +
  1.3518 +  RC_TIMER_STOP(_timer_rsc_phase2);
  1.3519 +} // end redefine_single_class()
  1.3520 +
  1.3521 +
  1.3522 +// Increment the classRedefinedCount field in the specific InstanceKlass
  1.3523 +// and in all direct and indirect subclasses.
  1.3524 +void VM_RedefineClasses::increment_class_counter(InstanceKlass *ik, TRAPS) {
  1.3525 +  oop class_mirror = ik->java_mirror();
  1.3526 +  Klass* class_oop = java_lang_Class::as_Klass(class_mirror);
  1.3527 +  int new_count = java_lang_Class::classRedefinedCount(class_mirror) + 1;
  1.3528 +  java_lang_Class::set_classRedefinedCount(class_mirror, new_count);
  1.3529 +
  1.3530 +  if (class_oop != _the_class_oop) {
  1.3531 +    // _the_class_oop count is printed at end of redefine_single_class()
  1.3532 +    RC_TRACE_WITH_THREAD(0x00000008, THREAD,
  1.3533 +      ("updated count in subclass=%s to %d", ik->external_name(), new_count));
  1.3534 +  }
  1.3535 +
  1.3536 +  for (Klass *subk = ik->subklass(); subk != NULL;
  1.3537 +       subk = subk->next_sibling()) {
  1.3538 +    if (subk->oop_is_instance()) {
  1.3539 +      // Only update instanceKlasses
  1.3540 +      InstanceKlass *subik = (InstanceKlass*)subk;
  1.3541 +      // recursively do subclasses of the current subclass
  1.3542 +      increment_class_counter(subik, THREAD);
  1.3543 +    }
  1.3544 +  }
  1.3545 +}
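// Illustrative sketch, not part of the original changeset: the recursion
// above walks Klass's intrusive child list -- subklass() is the first child
// and next_sibling() links the remaining children -- i.e. a depth-first
// traversal (hypothetical visit() helper, for illustration only):
//
//   void walk(Klass* k) {
//     visit(k);
//     for (Klass* s = k->subklass(); s != NULL; s = s->next_sibling()) {
//       walk(s);
//     }
//   }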
  1.3546 +
  1.3547 +void VM_RedefineClasses::CheckClass::do_klass(Klass* k) {
  1.3548 +  bool no_old_methods = true;  // be optimistic
  1.3549 +
  1.3550 +  // Both array and instance classes have vtables.
   1.3551 +  // A vtable should never contain old or obsolete methods.
  1.3552 +  ResourceMark rm(_thread);
  1.3553 +  if (k->vtable_length() > 0 &&
  1.3554 +      !k->vtable()->check_no_old_or_obsolete_entries()) {
  1.3555 +    if (RC_TRACE_ENABLED(0x00004000)) {
  1.3556 +      RC_TRACE_WITH_THREAD(0x00004000, _thread,
  1.3557 +        ("klassVtable::check_no_old_or_obsolete_entries failure"
  1.3558 +         " -- OLD or OBSOLETE method found -- class: %s",
  1.3559 +         k->signature_name()));
  1.3560 +      k->vtable()->dump_vtable();
  1.3561 +    }
  1.3562 +    no_old_methods = false;
  1.3563 +  }
  1.3564 +
  1.3565 +  if (k->oop_is_instance()) {
  1.3566 +    HandleMark hm(_thread);
  1.3567 +    InstanceKlass *ik = InstanceKlass::cast(k);
  1.3568 +
  1.3569 +    // an itable should never contain old or obsolete methods
  1.3570 +    if (ik->itable_length() > 0 &&
  1.3571 +        !ik->itable()->check_no_old_or_obsolete_entries()) {
  1.3572 +      if (RC_TRACE_ENABLED(0x00004000)) {
  1.3573 +        RC_TRACE_WITH_THREAD(0x00004000, _thread,
  1.3574 +          ("klassItable::check_no_old_or_obsolete_entries failure"
  1.3575 +           " -- OLD or OBSOLETE method found -- class: %s",
  1.3576 +           ik->signature_name()));
  1.3577 +        ik->itable()->dump_itable();
  1.3578 +      }
  1.3579 +      no_old_methods = false;
  1.3580 +    }
  1.3581 +
  1.3582 +    // the constant pool cache should never contain old or obsolete methods
  1.3583 +    if (ik->constants() != NULL &&
  1.3584 +        ik->constants()->cache() != NULL &&
  1.3585 +        !ik->constants()->cache()->check_no_old_or_obsolete_entries()) {
  1.3586 +      if (RC_TRACE_ENABLED(0x00004000)) {
  1.3587 +        RC_TRACE_WITH_THREAD(0x00004000, _thread,
  1.3588 +          ("cp-cache::check_no_old_or_obsolete_entries failure"
  1.3589 +           " -- OLD or OBSOLETE method found -- class: %s",
  1.3590 +           ik->signature_name()));
  1.3591 +        ik->constants()->cache()->dump_cache();
  1.3592 +      }
  1.3593 +      no_old_methods = false;
  1.3594 +    }
  1.3595 +  }
  1.3596 +
   1.3597 +  // Print and fail the guarantee if old or obsolete methods are found.
  1.3598 +  if (!no_old_methods) {
  1.3599 +    if (RC_TRACE_ENABLED(0x00004000)) {
  1.3600 +      dump_methods();
  1.3601 +    } else {
  1.3602 +      tty->print_cr("INFO: use the '-XX:TraceRedefineClasses=16384' option "
  1.3603 +        "to see more info about the following guarantee() failure.");
  1.3604 +    }
  1.3605 +    guarantee(false, "OLD and/or OBSOLETE method(s) found");
  1.3606 +  }
  1.3607 +}
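// Illustrative note, not part of the original changeset: CheckClass is driven
// the same way as the cpool-cache/vtable adjustment above -- instantiate it
// and hand it to the class iterator:
//
//   CheckClass check_class(thread);
//   ClassLoaderDataGraph::classes_do(&check_class);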
  1.3608 +
  1.3609 +
  1.3610 +void VM_RedefineClasses::dump_methods() {
  1.3611 +  int j;
  1.3612 +  RC_TRACE(0x00004000, ("_old_methods --"));
  1.3613 +  for (j = 0; j < _old_methods->length(); ++j) {
  1.3614 +    Method* m = _old_methods->at(j);
  1.3615 +    RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
  1.3616 +    m->access_flags().print_on(tty);
  1.3617 +    tty->print(" --  ");
  1.3618 +    m->print_name(tty);
  1.3619 +    tty->cr();
  1.3620 +  }
  1.3621 +  RC_TRACE(0x00004000, ("_new_methods --"));
  1.3622 +  for (j = 0; j < _new_methods->length(); ++j) {
  1.3623 +    Method* m = _new_methods->at(j);
  1.3624 +    RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
  1.3625 +    m->access_flags().print_on(tty);
  1.3626 +    tty->print(" --  ");
  1.3627 +    m->print_name(tty);
  1.3628 +    tty->cr();
  1.3629 +  }
  1.3630 +  RC_TRACE(0x00004000, ("_matching_(old/new)_methods --"));
  1.3631 +  for (j = 0; j < _matching_methods_length; ++j) {
  1.3632 +    Method* m = _matching_old_methods[j];
  1.3633 +    RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
  1.3634 +    m->access_flags().print_on(tty);
  1.3635 +    tty->print(" --  ");
  1.3636 +    m->print_name(tty);
  1.3637 +    tty->cr();
  1.3638 +    m = _matching_new_methods[j];
  1.3639 +    RC_TRACE_NO_CR(0x00004000, ("      (%5d)  ", m->vtable_index()));
  1.3640 +    m->access_flags().print_on(tty);
  1.3641 +    tty->cr();
  1.3642 +  }
  1.3643 +  RC_TRACE(0x00004000, ("_deleted_methods --"));
  1.3644 +  for (j = 0; j < _deleted_methods_length; ++j) {
  1.3645 +    Method* m = _deleted_methods[j];
  1.3646 +    RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
  1.3647 +    m->access_flags().print_on(tty);
  1.3648 +    tty->print(" --  ");
  1.3649 +    m->print_name(tty);
  1.3650 +    tty->cr();
  1.3651 +  }
  1.3652 +  RC_TRACE(0x00004000, ("_added_methods --"));
  1.3653 +  for (j = 0; j < _added_methods_length; ++j) {
  1.3654 +    Method* m = _added_methods[j];
  1.3655 +    RC_TRACE_NO_CR(0x00004000, ("%4d  (%5d)  ", j, m->vtable_index()));
  1.3656 +    m->access_flags().print_on(tty);
  1.3657 +    tty->print(" --  ");
  1.3658 +    m->print_name(tty);
  1.3659 +    tty->cr();
  1.3660 +  }
  1.3661 +}
