7033141: assert(has_cp_cache(i)) failed: oob

author:    coleenp
date:      Sat, 21 May 2011 15:39:54 -0700
changeset: 2945:d3b9f2be46ab
parent:    2928:cfbca4d74a61
child:     2946:9dd6c4ba364f

7033141: assert(has_cp_cache(i)) failed: oob
Summary: Unrewrite bytecodes for OOM error allocating the constant pool cache.
Reviewed-by: dcubed, acorn, never

src/share/vm/interpreter/rewriter.cpp
src/share/vm/interpreter/rewriter.hpp
src/share/vm/oops/instanceKlass.cpp
src/share/vm/oops/instanceKlass.hpp
src/share/vm/oops/methodOop.cpp
src/share/vm/prims/jvmtiRedefineClasses.cpp
src/share/vm/prims/methodHandleWalk.cpp
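Editor's note on the summary above: the core of the fix is that each rewrite helper now takes a reverse flag, so the same scan that converts constant-pool indices into cpCache indices can undo that conversion if allocating the cpCache fails (the OOM case behind the assert in the bug title). The sketch below illustrates that pattern only; the names (SimpleRewriter, rewrite_and_allocate_cache, simulate_oom) are illustrative stand-ins and are not part of the HotSpot sources touched by this changeset.

#include <cstdint>
#include <vector>

// Illustrative stand-in for rewriting a method's bytecode stream: each
// entry is a constant-pool index that rewriting replaces with a constant
// pool cache index.  Hypothetical names, not the HotSpot API.
struct SimpleRewriter {
  std::vector<uint16_t> indices;       // "bytecodes" rewritten in place
  std::vector<uint16_t> cp_to_cache;   // classfile CP index -> cache index
  std::vector<uint16_t> cache_to_cp;   // cache index -> classfile CP index
  bool simulate_oom = false;           // stand-in for a failed allocation

  // One helper handles both directions, mirroring the 'reverse' parameter
  // this changeset adds to rewrite_member_reference() and friends.
  void rewrite_entry(size_t i, bool reverse) {
    indices[i] = reverse ? cache_to_cp[indices[i]] : cp_to_cache[indices[i]];
  }

  // A forward scan rewrites; the same scan with reverse=true restores the
  // original bytecodes, analogous to Rewriter::restore_bytecodes().
  void scan(bool reverse) {
    for (size_t i = 0; i < indices.size(); i++) rewrite_entry(i, reverse);
  }

  // Rewrite, then allocate the cache; if allocation fails, undo the rewrite
  // so the bytecodes still refer to valid constant-pool entries.
  bool rewrite_and_allocate_cache() {
    scan(/*reverse=*/false);
    if (simulate_oom) {                // real code: allocation raises OOM
      scan(/*reverse=*/true);
      return false;                    // caller sees untouched bytecodes
    }
    return true;
  }
};

int main() {
  SimpleRewriter rw;
  rw.indices     = {1, 2, 1};          // three references into the CP
  rw.cp_to_cache = {0, 10, 11};        // CP index -> cache index
  rw.cache_to_cp.assign(12, 0);
  rw.cache_to_cp[10] = 1;
  rw.cache_to_cp[11] = 2;

  rw.simulate_oom = true;
  rw.rewrite_and_allocate_cache();     // fails; bytecodes restored to {1, 2, 1}
  return 0;
}

In the actual change, this reverse pass is driven by Rewriter::restore_bytecodes(), which re-runs scan_method() with reverse=true over every method when rewrite() finds a pending exception after make_constant_pool_cache().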
     1.1 --- a/src/share/vm/interpreter/rewriter.cpp	Fri May 20 22:27:48 2011 -0700
     1.2 +++ b/src/share/vm/interpreter/rewriter.cpp	Sat May 21 15:39:54 2011 -0700
     1.3 @@ -63,6 +63,15 @@
     1.4    _have_invoke_dynamic = ((tag_mask & (1 << JVM_CONSTANT_InvokeDynamic)) != 0);
     1.5  }
     1.6  
     1.7 +// Unrewrite the bytecodes if an error occurs.
     1.8 +void Rewriter::restore_bytecodes() {
     1.9 +  int len = _methods->length();
    1.10 +
    1.11 +  for (int i = len-1; i >= 0; i--) {
    1.12 +    methodOop method = (methodOop)_methods->obj_at(i);
    1.13 +    scan_method(method, true);
    1.14 +  }
    1.15 +}
    1.16  
    1.17  // Creates a constant pool cache given a CPC map
    1.18  void Rewriter::make_constant_pool_cache(TRAPS) {
    1.19 @@ -133,57 +142,94 @@
    1.20  
    1.21  
    1.22  // Rewrite a classfile-order CP index into a native-order CPC index.
    1.23 -void Rewriter::rewrite_member_reference(address bcp, int offset) {
    1.24 +void Rewriter::rewrite_member_reference(address bcp, int offset, bool reverse) {
    1.25    address p = bcp + offset;
    1.26 -  int  cp_index    = Bytes::get_Java_u2(p);
    1.27 -  int  cache_index = cp_entry_to_cp_cache(cp_index);
    1.28 -  Bytes::put_native_u2(p, cache_index);
    1.29 +  if (!reverse) {
    1.30 +    int  cp_index    = Bytes::get_Java_u2(p);
    1.31 +    int  cache_index = cp_entry_to_cp_cache(cp_index);
    1.32 +    Bytes::put_native_u2(p, cache_index);
    1.33 +  } else {
    1.34 +    int cache_index = Bytes::get_native_u2(p);
    1.35 +    int pool_index = cp_cache_entry_pool_index(cache_index);
    1.36 +    Bytes::put_Java_u2(p, pool_index);
    1.37 +  }
    1.38  }
    1.39  
    1.40  
    1.41 -void Rewriter::rewrite_invokedynamic(address bcp, int offset) {
    1.42 +void Rewriter::rewrite_invokedynamic(address bcp, int offset, bool reverse) {
    1.43    address p = bcp + offset;
    1.44 -  assert(p[-1] == Bytecodes::_invokedynamic, "");
    1.45 -  int cp_index = Bytes::get_Java_u2(p);
    1.46 -  int cpc  = maybe_add_cp_cache_entry(cp_index);  // add lazily
    1.47 -  int cpc2 = add_secondary_cp_cache_entry(cpc);
    1.48 +  assert(p[-1] == Bytecodes::_invokedynamic, "not invokedynamic bytecode");
    1.49 +  if (!reverse) {
    1.50 +    int cp_index = Bytes::get_Java_u2(p);
    1.51 +    int cpc  = maybe_add_cp_cache_entry(cp_index);  // add lazily
    1.52 +    int cpc2 = add_secondary_cp_cache_entry(cpc);
    1.53  
    1.54 -  // Replace the trailing four bytes with a CPC index for the dynamic
    1.55 -  // call site.  Unlike other CPC entries, there is one per bytecode,
    1.56 -  // not just one per distinct CP entry.  In other words, the
    1.57 -  // CPC-to-CP relation is many-to-one for invokedynamic entries.
    1.58 -  // This means we must use a larger index size than u2 to address
    1.59 -  // all these entries.  That is the main reason invokedynamic
    1.60 -  // must have a five-byte instruction format.  (Of course, other JVM
    1.61 -  // implementations can use the bytes for other purposes.)
    1.62 -  Bytes::put_native_u4(p, constantPoolCacheOopDesc::encode_secondary_index(cpc2));
    1.63 -  // Note: We use native_u4 format exclusively for 4-byte indexes.
    1.64 +    // Replace the trailing four bytes with a CPC index for the dynamic
    1.65 +    // call site.  Unlike other CPC entries, there is one per bytecode,
    1.66 +    // not just one per distinct CP entry.  In other words, the
    1.67 +    // CPC-to-CP relation is many-to-one for invokedynamic entries.
    1.68 +    // This means we must use a larger index size than u2 to address
    1.69 +    // all these entries.  That is the main reason invokedynamic
    1.70 +    // must have a five-byte instruction format.  (Of course, other JVM
    1.71 +    // implementations can use the bytes for other purposes.)
    1.72 +    Bytes::put_native_u4(p, constantPoolCacheOopDesc::encode_secondary_index(cpc2));
    1.73 +    // Note: We use native_u4 format exclusively for 4-byte indexes.
    1.74 +  } else {
    1.75 +    int cache_index = constantPoolCacheOopDesc::decode_secondary_index(
    1.76 +                        Bytes::get_native_u4(p));
    1.77 +    int secondary_index = cp_cache_secondary_entry_main_index(cache_index);
    1.78 +    int pool_index = cp_cache_entry_pool_index(secondary_index);
    1.79 +    assert(_pool->tag_at(pool_index).is_invoke_dynamic(), "wrong index");
    1.80 +    // zero out 4 bytes
    1.81 +    Bytes::put_Java_u4(p, 0);
    1.82 +    Bytes::put_Java_u2(p, pool_index);
    1.83 +  }
    1.84  }
    1.85  
    1.86  
    1.87  // Rewrite some ldc bytecodes to _fast_aldc
    1.88 -void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide) {
    1.89 -  assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "");
    1.90 -  address p = bcp + offset;
    1.91 -  int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
    1.92 -  constantTag tag = _pool->tag_at(cp_index).value();
    1.93 -  if (tag.is_method_handle() || tag.is_method_type()) {
    1.94 -    int cache_index = cp_entry_to_cp_cache(cp_index);
    1.95 -    if (is_wide) {
    1.96 -      (*bcp) = Bytecodes::_fast_aldc_w;
    1.97 -      assert(cache_index == (u2)cache_index, "");
    1.98 -      Bytes::put_native_u2(p, cache_index);
    1.99 -    } else {
   1.100 -      (*bcp) = Bytecodes::_fast_aldc;
   1.101 -      assert(cache_index == (u1)cache_index, "");
   1.102 -      (*p) = (u1)cache_index;
   1.103 +void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide,
   1.104 +                                 bool reverse) {
   1.105 +  if (!reverse) {
   1.106 +    assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
   1.107 +    address p = bcp + offset;
   1.108 +    int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
   1.109 +    constantTag tag = _pool->tag_at(cp_index).value();
   1.110 +    if (tag.is_method_handle() || tag.is_method_type()) {
   1.111 +      int cache_index = cp_entry_to_cp_cache(cp_index);
   1.112 +      if (is_wide) {
   1.113 +        (*bcp) = Bytecodes::_fast_aldc_w;
   1.114 +        assert(cache_index == (u2)cache_index, "index overflow");
   1.115 +        Bytes::put_native_u2(p, cache_index);
   1.116 +      } else {
   1.117 +        (*bcp) = Bytecodes::_fast_aldc;
   1.118 +        assert(cache_index == (u1)cache_index, "index overflow");
   1.119 +        (*p) = (u1)cache_index;
   1.120 +      }
   1.121 +    }
   1.122 +  } else {
   1.123 +    Bytecodes::Code rewritten_bc =
   1.124 +              (is_wide ? Bytecodes::_fast_aldc_w : Bytecodes::_fast_aldc);
   1.125 +    if ((*bcp) == rewritten_bc) {
   1.126 +      address p = bcp + offset;
   1.127 +      int cache_index = is_wide ? Bytes::get_native_u2(p) : (u1)(*p);
   1.128 +      int pool_index = cp_cache_entry_pool_index(cache_index);
   1.129 +      if (is_wide) {
   1.130 +        (*bcp) = Bytecodes::_ldc_w;
   1.131 +        assert(pool_index == (u2)pool_index, "index overflow");
   1.132 +        Bytes::put_Java_u2(p, pool_index);
   1.133 +      } else {
   1.134 +        (*bcp) = Bytecodes::_ldc;
   1.135 +        assert(pool_index == (u1)pool_index, "index overflow");
   1.136 +        (*p) = (u1)pool_index;
   1.137 +      }
   1.138      }
   1.139    }
   1.140  }
   1.141  
   1.142  
   1.143  // Rewrites a method given the index_map information
   1.144 -void Rewriter::scan_method(methodOop method) {
   1.145 +void Rewriter::scan_method(methodOop method, bool reverse) {
   1.146  
   1.147    int nof_jsrs = 0;
   1.148    bool has_monitor_bytecodes = false;
   1.149 @@ -236,6 +282,13 @@
   1.150  #endif
   1.151            break;
   1.152          }
   1.153 +        case Bytecodes::_fast_linearswitch:
   1.154 +        case Bytecodes::_fast_binaryswitch: {
   1.155 +#ifndef CC_INTERP
   1.156 +          (*bcp) = Bytecodes::_lookupswitch;
   1.157 +#endif
   1.158 +          break;
   1.159 +        }
   1.160          case Bytecodes::_getstatic      : // fall through
   1.161          case Bytecodes::_putstatic      : // fall through
   1.162          case Bytecodes::_getfield       : // fall through
   1.163 @@ -244,16 +297,18 @@
   1.164          case Bytecodes::_invokespecial  : // fall through
   1.165          case Bytecodes::_invokestatic   :
   1.166          case Bytecodes::_invokeinterface:
   1.167 -          rewrite_member_reference(bcp, prefix_length+1);
   1.168 +          rewrite_member_reference(bcp, prefix_length+1, reverse);
   1.169            break;
   1.170          case Bytecodes::_invokedynamic:
   1.171 -          rewrite_invokedynamic(bcp, prefix_length+1);
   1.172 +          rewrite_invokedynamic(bcp, prefix_length+1, reverse);
   1.173            break;
   1.174          case Bytecodes::_ldc:
   1.175 -          maybe_rewrite_ldc(bcp, prefix_length+1, false);
   1.176 +        case Bytecodes::_fast_aldc:
   1.177 +          maybe_rewrite_ldc(bcp, prefix_length+1, false, reverse);
   1.178            break;
   1.179          case Bytecodes::_ldc_w:
   1.180 -          maybe_rewrite_ldc(bcp, prefix_length+1, true);
   1.181 +        case Bytecodes::_fast_aldc_w:
   1.182 +          maybe_rewrite_ldc(bcp, prefix_length+1, true, reverse);
   1.183            break;
   1.184          case Bytecodes::_jsr            : // fall through
   1.185          case Bytecodes::_jsr_w          : nof_jsrs++;                   break;
   1.186 @@ -273,12 +328,13 @@
   1.187    if (nof_jsrs > 0) {
   1.188      method->set_has_jsrs();
   1.189      // Second pass will revisit this method.
   1.190 -    assert(method->has_jsrs(), "");
   1.191 +    assert(method->has_jsrs(), "didn't we just set this?");
   1.192    }
   1.193  }
   1.194  
   1.195  // After constant pool is created, revisit methods containing jsrs.
   1.196  methodHandle Rewriter::rewrite_jsrs(methodHandle method, TRAPS) {
   1.197 +  ResourceMark rm(THREAD);
   1.198    ResolveOopMapConflicts romc(method);
   1.199    methodHandle original_method = method;
   1.200    method = romc.do_potential_rewrite(CHECK_(methodHandle()));
   1.201 @@ -300,7 +356,6 @@
   1.202    return method;
   1.203  }
   1.204  
   1.205 -
   1.206  void Rewriter::rewrite(instanceKlassHandle klass, TRAPS) {
   1.207    ResourceMark rm(THREAD);
   1.208    Rewriter     rw(klass, klass->constants(), klass->methods(), CHECK);
   1.209 @@ -343,34 +398,57 @@
   1.210    }
   1.211  
   1.212    // rewrite methods, in two passes
   1.213 -  int i, len = _methods->length();
   1.214 +  int len = _methods->length();
   1.215  
   1.216 -  for (i = len; --i >= 0; ) {
   1.217 +  for (int i = len-1; i >= 0; i--) {
   1.218      methodOop method = (methodOop)_methods->obj_at(i);
   1.219      scan_method(method);
   1.220    }
   1.221  
   1.222    // allocate constant pool cache, now that we've seen all the bytecodes
   1.223 -  make_constant_pool_cache(CHECK);
   1.224 +  make_constant_pool_cache(THREAD);
   1.225  
   1.226 -  for (i = len; --i >= 0; ) {
   1.227 -    methodHandle m(THREAD, (methodOop)_methods->obj_at(i));
   1.228 +  // Restore bytecodes to their unrewritten state if there are exceptions
   1.229 +  // rewriting bytecodes or allocating the cpCache
   1.230 +  if (HAS_PENDING_EXCEPTION) {
   1.231 +    restore_bytecodes();
   1.232 +    return;
   1.233 +  }
   1.234 +}
   1.235 +
   1.236 +// Relocate jsr/rets in a method.  This can't be done with the rewriter
   1.237 +// stage because it can throw other exceptions, leaving the bytecodes
   1.238 +// pointing at constant pool cache entries.
   1.239 +// Link and check jvmti dependencies while we're iterating over the methods.
   1.240 +// JSR292 code calls with a different set of methods, so two entry points.
   1.241 +void Rewriter::relocate_and_link(instanceKlassHandle this_oop, TRAPS) {
   1.242 +  objArrayHandle methods(THREAD, this_oop->methods());
   1.243 +  relocate_and_link(this_oop, methods, THREAD);
   1.244 +}
   1.245 +
   1.246 +void Rewriter::relocate_and_link(instanceKlassHandle this_oop,
   1.247 +                                 objArrayHandle methods, TRAPS) {
   1.248 +  int len = methods->length();
   1.249 +  for (int i = len-1; i >= 0; i--) {
   1.250 +    methodHandle m(THREAD, (methodOop)methods->obj_at(i));
   1.251  
   1.252      if (m->has_jsrs()) {
   1.253        m = rewrite_jsrs(m, CHECK);
   1.254        // Method might have gotten rewritten.
   1.255 -      _methods->obj_at_put(i, m());
   1.256 +      methods->obj_at_put(i, m());
   1.257      }
   1.258  
   1.259 -    // Set up method entry points for compiler and interpreter.
   1.260 +    // Set up method entry points for compiler and interpreter    .
   1.261      m->link_method(m, CHECK);
   1.262  
   1.263 +    // This is for JVMTI and unrelated to relocator but the last thing we do
   1.264  #ifdef ASSERT
   1.265      if (StressMethodComparator) {
   1.266        static int nmc = 0;
   1.267        for (int j = i; j >= 0 && j >= i-4; j--) {
   1.268          if ((++nmc % 1000) == 0)  tty->print_cr("Have run MethodComparator %d times...", nmc);
   1.269 -        bool z = MethodComparator::methods_EMCP(m(), (methodOop)_methods->obj_at(j));
   1.270 +        bool z = MethodComparator::methods_EMCP(m(),
   1.271 +                   (methodOop)methods->obj_at(j));
   1.272          if (j == i && !z) {
   1.273            tty->print("MethodComparator FAIL: "); m->print(); m->print_codes();
   1.274            assert(z, "method must compare equal to itself");
     2.1 --- a/src/share/vm/interpreter/rewriter.hpp	Fri May 20 22:27:48 2011 -0700
     2.2 +++ b/src/share/vm/interpreter/rewriter.hpp	Sat May 21 15:39:54 2011 -0700
     2.3 @@ -85,13 +85,15 @@
     2.4  
     2.5    void compute_index_maps();
     2.6    void make_constant_pool_cache(TRAPS);
     2.7 -  void scan_method(methodOop m);
     2.8 -  methodHandle rewrite_jsrs(methodHandle m, TRAPS);
     2.9 +  void scan_method(methodOop m, bool reverse = false);
    2.10    void rewrite_Object_init(methodHandle m, TRAPS);
    2.11 -  void rewrite_member_reference(address bcp, int offset);
    2.12 -  void rewrite_invokedynamic(address bcp, int offset);
    2.13 -  void maybe_rewrite_ldc(address bcp, int offset, bool is_wide);
    2.14 +  void rewrite_member_reference(address bcp, int offset, bool reverse = false);
    2.15 +  void rewrite_invokedynamic(address bcp, int offset, bool reverse = false);
    2.16 +  void maybe_rewrite_ldc(address bcp, int offset, bool is_wide, bool reverse = false);
    2.17 +  // Revert bytecodes in case of an exception.
    2.18 +  void restore_bytecodes();
    2.19  
    2.20 +  static methodHandle rewrite_jsrs(methodHandle m, TRAPS);
    2.21   public:
    2.22    // Driver routine:
    2.23    static void rewrite(instanceKlassHandle klass, TRAPS);
    2.24 @@ -100,6 +102,13 @@
    2.25    enum {
    2.26      _secondary_entry_tag = nth_bit(30)
    2.27    };
    2.28 +
    2.29 +  // Second pass, not gated by is_rewritten flag
    2.30 +  static void relocate_and_link(instanceKlassHandle klass, TRAPS);
     2.31 +  // JSR292 version to call with its own methods.
    2.32 +  static void relocate_and_link(instanceKlassHandle klass,
    2.33 +                                objArrayHandle methods, TRAPS);
    2.34 +
    2.35  };
    2.36  
    2.37  #endif // SHARE_VM_INTERPRETER_REWRITER_HPP
     3.1 --- a/src/share/vm/oops/instanceKlass.cpp	Fri May 20 22:27:48 2011 -0700
     3.2 +++ b/src/share/vm/oops/instanceKlass.cpp	Sat May 21 15:39:54 2011 -0700
     3.3 @@ -335,6 +335,9 @@
     3.4          this_oop->rewrite_class(CHECK_false);
     3.5        }
     3.6  
     3.7 +      // relocate jsrs and link methods after they are all rewritten
     3.8 +      this_oop->relocate_and_link_methods(CHECK_false);
     3.9 +
    3.10        // Initialize the vtable and interface table after
    3.11        // methods have been rewritten since rewrite may
    3.12        // fabricate new methodOops.
    3.13 @@ -365,17 +368,8 @@
    3.14  
    3.15  
    3.16  // Rewrite the byte codes of all of the methods of a class.
    3.17 -// Three cases:
    3.18 -//    During the link of a newly loaded class.
    3.19 -//    During the preloading of classes to be written to the shared spaces.
    3.20 -//      - Rewrite the methods and update the method entry points.
    3.21 -//
    3.22 -//    During the link of a class in the shared spaces.
    3.23 -//      - The methods were already rewritten, update the metho entry points.
    3.24 -//
    3.25  // The rewriter must be called exactly once. Rewriting must happen after
    3.26  // verification but before the first method of the class is executed.
    3.27 -
    3.28  void instanceKlass::rewrite_class(TRAPS) {
    3.29    assert(is_loaded(), "must be loaded");
    3.30    instanceKlassHandle this_oop(THREAD, this->as_klassOop());
    3.31 @@ -383,10 +377,19 @@
    3.32      assert(this_oop()->is_shared(), "rewriting an unshared class?");
    3.33      return;
    3.34    }
    3.35 -  Rewriter::rewrite(this_oop, CHECK); // No exception can happen here
    3.36 +  Rewriter::rewrite(this_oop, CHECK);
    3.37    this_oop->set_rewritten();
    3.38  }
    3.39  
    3.40 +// Now relocate and link method entry points after class is rewritten.
    3.41 +// This is outside is_rewritten flag. In case of an exception, it can be
    3.42 +// executed more than once.
    3.43 +void instanceKlass::relocate_and_link_methods(TRAPS) {
    3.44 +  assert(is_loaded(), "must be loaded");
    3.45 +  instanceKlassHandle this_oop(THREAD, this->as_klassOop());
    3.46 +  Rewriter::relocate_and_link(this_oop, CHECK);
    3.47 +}
    3.48 +
    3.49  
    3.50  void instanceKlass::initialize_impl(instanceKlassHandle this_oop, TRAPS) {
    3.51    // Make sure klass is linked (verified) before initialization
     4.1 --- a/src/share/vm/oops/instanceKlass.hpp	Fri May 20 22:27:48 2011 -0700
     4.2 +++ b/src/share/vm/oops/instanceKlass.hpp	Sat May 21 15:39:54 2011 -0700
     4.3 @@ -392,6 +392,7 @@
     4.4    bool link_class_or_fail(TRAPS); // returns false on failure
     4.5    void unlink_class();
     4.6    void rewrite_class(TRAPS);
     4.7 +  void relocate_and_link_methods(TRAPS);
     4.8    methodOop class_initializer();
     4.9  
    4.10    // set the class to initialized if no static initializer is present
     5.1 --- a/src/share/vm/oops/methodOop.cpp	Fri May 20 22:27:48 2011 -0700
     5.2 +++ b/src/share/vm/oops/methodOop.cpp	Sat May 21 15:39:54 2011 -0700
     5.3 @@ -693,7 +693,10 @@
     5.4  // Called when the method_holder is getting linked. Setup entrypoints so the method
     5.5  // is ready to be called from interpreter, compiler, and vtables.
     5.6  void methodOopDesc::link_method(methodHandle h_method, TRAPS) {
     5.7 -  assert(_i2i_entry == NULL, "should only be called once");
     5.8 +  // If the code cache is full, we may reenter this function for the
     5.9 +  // leftover methods that weren't linked.
    5.10 +  if (_i2i_entry != NULL) return;
    5.11 +
    5.12    assert(_adapter == NULL, "init'd to NULL" );
    5.13    assert( _code == NULL, "nothing compiled yet" );
    5.14  
    5.15 @@ -717,7 +720,7 @@
    5.16    // called from the vtable.  We need adapters on such methods that get loaded
    5.17    // later.  Ditto for mega-morphic itable calls.  If this proves to be a
    5.18    // problem we'll make these lazily later.
    5.19 -  (void) make_adapters(h_method, CHECK);
    5.20 +  if (UseCompiler) (void) make_adapters(h_method, CHECK);
    5.21  
    5.22    // ONLY USE the h_method now as make_adapter may have blocked
    5.23  
     6.1 --- a/src/share/vm/prims/jvmtiRedefineClasses.cpp	Fri May 20 22:27:48 2011 -0700
     6.2 +++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp	Sat May 21 15:39:54 2011 -0700
     6.3 @@ -992,6 +992,9 @@
     6.4      }
     6.5  
     6.6      Rewriter::rewrite(scratch_class, THREAD);
     6.7 +    if (!HAS_PENDING_EXCEPTION) {
     6.8 +      Rewriter::relocate_and_link(scratch_class, THREAD);
     6.9 +    }
    6.10      if (HAS_PENDING_EXCEPTION) {
    6.11        Symbol* ex_name = PENDING_EXCEPTION->klass()->klass_part()->name();
    6.12        CLEAR_PENDING_EXCEPTION;
     7.1 --- a/src/share/vm/prims/methodHandleWalk.cpp	Fri May 20 22:27:48 2011 -0700
     7.2 +++ b/src/share/vm/prims/methodHandleWalk.cpp	Sat May 21 15:39:54 2011 -0700
     7.3 @@ -1399,6 +1399,7 @@
     7.4    objArrayHandle methods(THREAD, m_array);
     7.5    methods->obj_at_put(0, m());
     7.6    Rewriter::rewrite(_target_klass(), cpool, methods, CHECK_(empty));  // Use fake class.
     7.7 +  Rewriter::relocate_and_link(_target_klass(), methods, CHECK_(empty));  // Use fake class.
     7.8  
     7.9    // Set the invocation counter's count to the invoke count of the
    7.10    // original call site.
