src/share/vm/code/nmethod.cpp

changeset 6992:2c6ef90f030a
parent    6991:882004b9e7e1
child     7333:b12a2a9b05ca
     1.1 --- a/src/share/vm/code/nmethod.cpp	Tue Jul 01 09:03:55 2014 +0200
     1.2 +++ b/src/share/vm/code/nmethod.cpp	Mon Jul 07 10:12:40 2014 +0200
     1.3 @@ -49,6 +49,8 @@
     1.4  
     1.5  PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
     1.6  
     1.7 +unsigned char nmethod::_global_unloading_clock = 0;
     1.8 +
     1.9  #ifdef DTRACE_ENABLED
    1.10  
    1.11  // Only bother with this argument setup if dtrace is available
    1.12 @@ -466,6 +468,7 @@
    1.13  // Fill in default values for various flag fields
    1.14  void nmethod::init_defaults() {
    1.15    _state                      = in_use;
    1.16 +  _unloading_clock            = 0;
    1.17    _marked_for_reclamation     = 0;
    1.18    _has_flushed_dependencies   = 0;
    1.19    _has_unsafe_access          = 0;
    1.20 @@ -484,7 +487,11 @@
    1.21    _oops_do_mark_link       = NULL;
    1.22    _jmethod_id              = NULL;
    1.23    _osr_link                = NULL;
    1.24 -  _scavenge_root_link      = NULL;
    1.25 +  if (UseG1GC) {
    1.26 +    _unloading_next        = NULL;
    1.27 +  } else {
    1.28 +    _scavenge_root_link    = NULL;
    1.29 +  }
    1.30    _scavenge_root_state     = 0;
    1.31    _compiler                = NULL;
    1.32  #if INCLUDE_RTM_OPT
    1.33 @@ -1190,6 +1197,77 @@
    1.34    }
    1.35  }
    1.36  
    1.37 +void nmethod::verify_clean_inline_caches() {
    1.38 +  assert_locked_or_safepoint(CompiledIC_lock);
    1.39 +
    1.40 +  // If the method is not entrant or zombie then a JMP is plastered over the
    1.41 +  // first few bytes.  If an oop in the old code was there, that oop
    1.42 +  // should not get GC'd.  Skip the first few bytes of oops on
    1.43 +  // not-entrant methods.
    1.44 +  address low_boundary = verified_entry_point();
    1.45 +  if (!is_in_use()) {
    1.46 +    low_boundary += NativeJump::instruction_size;
    1.47 +    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
    1.48 +    // This means that the low_boundary is going to be a little too high.
    1.49 +    // This shouldn't matter, since oops of non-entrant methods are never used.
    1.50 +    // In fact, why are we bothering to look at oops in a non-entrant method??
    1.51 +  }
    1.52 +
    1.53 +  ResourceMark rm;
    1.54 +  RelocIterator iter(this, low_boundary);
    1.55 +  while(iter.next()) {
    1.56 +    switch(iter.type()) {
    1.57 +      case relocInfo::virtual_call_type:
    1.58 +      case relocInfo::opt_virtual_call_type: {
    1.59 +        CompiledIC *ic = CompiledIC_at(&iter);
    1.60 +        // Ok, to lookup references to zombies here
    1.61 +        CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
    1.62 +        if( cb != NULL && cb->is_nmethod() ) {
    1.63 +          nmethod* nm = (nmethod*)cb;
    1.64 +          // Verify that inline caches pointing to both zombie and not_entrant methods are clean
    1.65 +          if (!nm->is_in_use() || (nm->method()->code() != nm)) {
    1.66 +            assert(ic->is_clean(), "IC should be clean");
    1.67 +          }
    1.68 +        }
    1.69 +        break;
    1.70 +      }
    1.71 +      case relocInfo::static_call_type: {
    1.72 +        CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
    1.73 +        CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
    1.74 +        if( cb != NULL && cb->is_nmethod() ) {
    1.75 +          nmethod* nm = (nmethod*)cb;
    1.76 +          // Verify that inline caches pointing to both zombie and not_entrant methods are clean
    1.77 +          if (!nm->is_in_use() || (nm->method()->code() != nm)) {
    1.78 +            assert(csc->is_clean(), "IC should be clean");
    1.79 +          }
    1.80 +        }
    1.81 +        break;
    1.82 +      }
    1.83 +    }
    1.84 +  }
    1.85 +}
    1.86 +
    1.87 +int nmethod::verify_icholder_relocations() {
    1.88 +  int count = 0;
    1.89 +
    1.90 +  RelocIterator iter(this);
    1.91 +  while(iter.next()) {
    1.92 +    if (iter.type() == relocInfo::virtual_call_type) {
    1.93 +      if (CompiledIC::is_icholder_call_site(iter.virtual_call_reloc())) {
    1.94 +        CompiledIC *ic = CompiledIC_at(&iter);
    1.95 +        if (TraceCompiledIC) {
    1.96 +          tty->print("noticed icholder " INTPTR_FORMAT " ", p2i(ic->cached_icholder()));
    1.97 +          ic->print();
    1.98 +        }
    1.99 +        assert(ic->cached_icholder() != NULL, "must be non-NULL");
   1.100 +        count++;
   1.101 +      }
   1.102 +    }
   1.103 +  }
   1.104 +
   1.105 +  return count;
   1.106 +}
   1.107 +
   1.108  // This is a private interface with the sweeper.
   1.109  void nmethod::mark_as_seen_on_stack() {
   1.110    assert(is_alive(), "Must be an alive method");
   1.111 @@ -1222,6 +1300,23 @@
   1.112    mdo->inc_decompile_count();
   1.113  }
   1.114  
   1.115 +void nmethod::increase_unloading_clock() {
   1.116 +  _global_unloading_clock++;
   1.117 +  if (_global_unloading_clock == 0) {
   1.118 +    // _nmethods are allocated with _unloading_clock == 0,
   1.119 +    // so 0 is never used as a clock value.
   1.120 +    _global_unloading_clock = 1;
   1.121 +  }
   1.122 +}
   1.123 +
   1.124 +void nmethod::set_unloading_clock(unsigned char unloading_clock) {
   1.125 +  OrderAccess::release_store((volatile jubyte*)&_unloading_clock, unloading_clock);
   1.126 +}
   1.127 +
   1.128 +unsigned char nmethod::unloading_clock() {
   1.129 +  return (unsigned char)OrderAccess::load_acquire((volatile jubyte*)&_unloading_clock);
   1.130 +}
   1.131 +
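The set_unloading_clock()/unloading_clock() pair above uses a release store and a load acquire, so a thread that reads an up-to-date stamp also sees the writes made before the stamp was published. Below is a minimal standalone model of the clock handshake (all names are illustrative, nothing here is HotSpot code): a global epoch is advanced once per unloading cycle, items are stamped after processing, and the wrap-around skips 0 because, as the comment in increase_unloading_clock() notes, nmethods are allocated with _unloading_clock == 0.

// Minimal standalone model of the unloading-clock handshake; every name here
// is illustrative and not part of HotSpot.  "stamp != epoch" means "not yet
// processed in this cycle"; zero is reserved for "never stamped".
#include <atomic>
#include <cassert>

static std::atomic<unsigned char> global_clock(0);

struct Item {
  std::atomic<unsigned char> clock;
  Item() : clock(0) {}                    // allocated with clock == 0
};

static void increase_clock() {
  unsigned char next = (unsigned char)(global_clock.load(std::memory_order_relaxed) + 1);
  if (next == 0) next = 1;                // skip 0 on wrap-around
  global_clock.store(next, std::memory_order_relaxed);
}

static bool processed_this_cycle(const Item& i) {
  // acquire pairs with the release in mark_processed()
  return i.clock.load(std::memory_order_acquire) ==
         global_clock.load(std::memory_order_relaxed);
}

static void mark_processed(Item& i) {
  i.clock.store(global_clock.load(std::memory_order_relaxed),
                std::memory_order_release);
}

int main() {
  Item a;
  increase_clock();                       // a new unloading cycle begins
  assert(!processed_this_cycle(a));
  mark_processed(a);
  assert(processed_this_cycle(a));
  return 0;
}

In this model the global epoch is only advanced from a single thread between cycles, which matches how increase_unloading_clock() is presumably called by the GC before the unloading workers start.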
   1.132  void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) {
   1.133  
   1.134    post_compiled_method_unload();
   1.135 @@ -1267,6 +1362,10 @@
   1.136      // for later on.
   1.137      CodeCache::set_needs_cache_clean(true);
   1.138    }
   1.139 +
   1.140 +  // Unregister must be done before the state change
   1.141 +  Universe::heap()->unregister_nmethod(this);
   1.142 +
   1.143    _state = unloaded;
   1.144  
   1.145    // Log the unloading.
   1.146 @@ -1621,6 +1720,35 @@
   1.147    set_unload_reported();
   1.148  }
   1.149  
    1.150 +static void clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive) {
   1.151 +  if (ic->is_icholder_call()) {
   1.152 +    // The only exception is compiledICHolder oops which may
   1.153 +    // yet be marked below. (We check this further below).
   1.154 +    CompiledICHolder* cichk_oop = ic->cached_icholder();
   1.155 +    if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) &&
   1.156 +        cichk_oop->holder_klass()->is_loader_alive(is_alive)) {
   1.157 +      return;
   1.158 +    }
   1.159 +  } else {
   1.160 +    Metadata* ic_oop = ic->cached_metadata();
   1.161 +    if (ic_oop != NULL) {
   1.162 +      if (ic_oop->is_klass()) {
   1.163 +        if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
   1.164 +          return;
   1.165 +        }
   1.166 +      } else if (ic_oop->is_method()) {
   1.167 +        if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
   1.168 +          return;
   1.169 +        }
   1.170 +      } else {
   1.171 +        ShouldNotReachHere();
   1.172 +      }
   1.173 +    }
   1.174 +  }
   1.175 +
   1.176 +  ic->set_to_clean();
   1.177 +}
   1.178 +
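clean_ic_if_metadata_is_dead() hoists the per-call-site decision out of nmethod::do_unloading (see the hunk further down, which now simply calls it) so the existing serial path and the new parallel path can share it: the inline cache is reset unless every piece of metadata it caches belongs to a class loader that is still alive. A rough standalone model of the plain-metadata branch, with purely illustrative types, is shown below.

// Rough standalone model of the "plain metadata" branch above (the icholder
// branch additionally requires both the holder method's class and the holder
// klass to be loader-alive).  All types are illustrative, not HotSpot's.
#include <cassert>
#include <cstddef>

struct FakeLoader { bool alive; };
struct FakeKlass  { FakeLoader* loader; };
struct FakeMethod { FakeKlass*  holder; };

struct FakeIC {
  FakeKlass*  cached_klass;    // mirrors cached_metadata() being a Klass
  FakeMethod* cached_method;   // ... or a Method
  bool        is_clean;
};

static void clean_if_metadata_is_dead(FakeIC& ic) {
  if (ic.cached_klass != NULL && ic.cached_klass->loader->alive) {
    return;                                    // klass loader survived marking
  }
  if (ic.cached_method != NULL && ic.cached_method->holder->loader->alive) {
    return;                                    // method's holder loader survived
  }
  ic.is_clean = true;                          // dead (or no) metadata: reset the cache
}

int main() {
  FakeLoader dead = { false };
  FakeKlass  k    = { &dead };
  FakeIC     ic   = { &k, NULL, false };
  clean_if_metadata_is_dead(ic);               // klass loader is dead -> cleaned
  assert(ic.is_clean);
  return 0;
}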
   1.179  // This is called at the end of the strong tracing/marking phase of a
   1.180  // GC to unload an nmethod if it contains otherwise unreachable
   1.181  // oops.
   1.182 @@ -1664,31 +1792,7 @@
   1.183      while(iter.next()) {
   1.184        if (iter.type() == relocInfo::virtual_call_type) {
   1.185          CompiledIC *ic = CompiledIC_at(&iter);
   1.186 -        if (ic->is_icholder_call()) {
   1.187 -          // The only exception is compiledICHolder oops which may
   1.188 -          // yet be marked below. (We check this further below).
   1.189 -          CompiledICHolder* cichk_oop = ic->cached_icholder();
   1.190 -          if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) &&
   1.191 -              cichk_oop->holder_klass()->is_loader_alive(is_alive)) {
   1.192 -            continue;
   1.193 -          }
   1.194 -        } else {
   1.195 -          Metadata* ic_oop = ic->cached_metadata();
   1.196 -          if (ic_oop != NULL) {
   1.197 -            if (ic_oop->is_klass()) {
   1.198 -              if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
   1.199 -                continue;
   1.200 -              }
   1.201 -            } else if (ic_oop->is_method()) {
   1.202 -              if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
   1.203 -                continue;
   1.204 -              }
   1.205 -            } else {
   1.206 -              ShouldNotReachHere();
   1.207 -            }
   1.208 -          }
   1.209 -        }
   1.210 -        ic->set_to_clean();
   1.211 +        clean_ic_if_metadata_is_dead(ic, is_alive);
   1.212        }
   1.213      }
   1.214    }
   1.215 @@ -1726,6 +1830,175 @@
   1.216    verify_metadata_loaders(low_boundary, is_alive);
   1.217  }
   1.218  
   1.219 +template <class CompiledICorStaticCall>
   1.220 +static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, BoolObjectClosure *is_alive, nmethod* from) {
   1.221 +  // Ok, to lookup references to zombies here
   1.222 +  CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
   1.223 +  if (cb != NULL && cb->is_nmethod()) {
   1.224 +    nmethod* nm = (nmethod*)cb;
   1.225 +
   1.226 +    if (nm->unloading_clock() != nmethod::global_unloading_clock()) {
   1.227 +      // The nmethod has not been processed yet.
   1.228 +      return true;
   1.229 +    }
   1.230 +
   1.231 +    // Clean inline caches pointing to both zombie and not_entrant methods
   1.232 +    if (!nm->is_in_use() || (nm->method()->code() != nm)) {
   1.233 +      ic->set_to_clean();
    1.234 +      assert(ic->is_clean(), err_msg("nmethod " PTR_FORMAT " not clean %s", from, from->method()->name_and_sig_as_C_string()));
   1.235 +    }
   1.236 +  }
   1.237 +
   1.238 +  return false;
   1.239 +}
   1.240 +
   1.241 +static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, BoolObjectClosure *is_alive, nmethod* from) {
   1.242 +  return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), is_alive, from);
   1.243 +}
   1.244 +
   1.245 +static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, BoolObjectClosure *is_alive, nmethod* from) {
   1.246 +  return clean_if_nmethod_is_unloaded(csc, csc->destination(), is_alive, from);
   1.247 +}
   1.248 +
   1.249 +bool nmethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
   1.250 +  ResourceMark rm;
   1.251 +
   1.252 +  // Make sure the oop's ready to receive visitors
   1.253 +  assert(!is_zombie() && !is_unloaded(),
   1.254 +         "should not call follow on zombie or unloaded nmethod");
   1.255 +
   1.256 +  // If the method is not entrant then a JMP is plastered over the
   1.257 +  // first few bytes.  If an oop in the old code was there, that oop
   1.258 +  // should not get GC'd.  Skip the first few bytes of oops on
   1.259 +  // not-entrant methods.
   1.260 +  address low_boundary = verified_entry_point();
   1.261 +  if (is_not_entrant()) {
   1.262 +    low_boundary += NativeJump::instruction_size;
   1.263 +    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
   1.264 +    // (See comment above.)
   1.265 +  }
   1.266 +
   1.267 +  // The RedefineClasses() API can cause the class unloading invariant
   1.268 +  // to no longer be true. See jvmtiExport.hpp for details.
   1.269 +  // Also, leave a debugging breadcrumb in local flag.
   1.270 +  bool a_class_was_redefined = JvmtiExport::has_redefined_a_class();
   1.271 +  if (a_class_was_redefined) {
   1.272 +    // This set of the unloading_occurred flag is done before the
   1.273 +    // call to post_compiled_method_unload() so that the unloading
   1.274 +    // of this nmethod is reported.
   1.275 +    unloading_occurred = true;
   1.276 +  }
   1.277 +
   1.278 +  // Exception cache
   1.279 +  clean_exception_cache(is_alive);
   1.280 +
   1.281 +  bool is_unloaded = false;
   1.282 +  bool postponed = false;
   1.283 +
   1.284 +  RelocIterator iter(this, low_boundary);
   1.285 +  while(iter.next()) {
   1.286 +
   1.287 +    switch (iter.type()) {
   1.288 +
   1.289 +    case relocInfo::virtual_call_type:
   1.290 +      if (unloading_occurred) {
   1.291 +        // If class unloading occurred we first iterate over all inline caches and
   1.292 +        // clear ICs where the cached oop is referring to an unloaded klass or method.
   1.293 +        clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
   1.294 +      }
   1.295 +
   1.296 +      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
   1.297 +      break;
   1.298 +
   1.299 +    case relocInfo::opt_virtual_call_type:
   1.300 +      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
   1.301 +      break;
   1.302 +
   1.303 +    case relocInfo::static_call_type:
   1.304 +      postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
   1.305 +      break;
   1.306 +
   1.307 +    case relocInfo::oop_type:
   1.308 +      if (!is_unloaded) {
   1.309 +        // Unload check
   1.310 +        oop_Relocation* r = iter.oop_reloc();
   1.311 +        // Traverse those oops directly embedded in the code.
   1.312 +        // Other oops (oop_index>0) are seen as part of scopes_oops.
   1.313 +        assert(1 == (r->oop_is_immediate()) +
   1.314 +                  (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
   1.315 +              "oop must be found in exactly one place");
   1.316 +        if (r->oop_is_immediate() && r->oop_value() != NULL) {
   1.317 +          if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
   1.318 +            is_unloaded = true;
   1.319 +          }
   1.320 +        }
   1.321 +      }
   1.322 +      break;
   1.323 +
   1.324 +    }
   1.325 +  }
   1.326 +
   1.327 +  if (is_unloaded) {
   1.328 +    return postponed;
   1.329 +  }
   1.330 +
   1.331 +  // Scopes
   1.332 +  for (oop* p = oops_begin(); p < oops_end(); p++) {
   1.333 +    if (*p == Universe::non_oop_word())  continue;  // skip non-oops
   1.334 +    if (can_unload(is_alive, p, unloading_occurred)) {
   1.335 +      is_unloaded = true;
   1.336 +      break;
   1.337 +    }
   1.338 +  }
   1.339 +
   1.340 +  if (is_unloaded) {
   1.341 +    return postponed;
   1.342 +  }
   1.343 +
   1.344 +  // Ensure that all metadata is still alive
   1.345 +  verify_metadata_loaders(low_boundary, is_alive);
   1.346 +
   1.347 +  return postponed;
   1.348 +}
   1.349 +
   1.350 +void nmethod::do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred) {
   1.351 +  ResourceMark rm;
   1.352 +
   1.353 +  // Make sure the oop's ready to receive visitors
   1.354 +  assert(!is_zombie(),
   1.355 +         "should not call follow on zombie nmethod");
   1.356 +
   1.357 +  // If the method is not entrant then a JMP is plastered over the
   1.358 +  // first few bytes.  If an oop in the old code was there, that oop
   1.359 +  // should not get GC'd.  Skip the first few bytes of oops on
   1.360 +  // not-entrant methods.
   1.361 +  address low_boundary = verified_entry_point();
   1.362 +  if (is_not_entrant()) {
   1.363 +    low_boundary += NativeJump::instruction_size;
   1.364 +    // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
   1.365 +    // (See comment above.)
   1.366 +  }
   1.367 +
   1.368 +  RelocIterator iter(this, low_boundary);
   1.369 +  while(iter.next()) {
   1.370 +
   1.371 +    switch (iter.type()) {
   1.372 +
   1.373 +    case relocInfo::virtual_call_type:
   1.374 +      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
   1.375 +      break;
   1.376 +
   1.377 +    case relocInfo::opt_virtual_call_type:
   1.378 +      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
   1.379 +      break;
   1.380 +
   1.381 +    case relocInfo::static_call_type:
   1.382 +      clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
   1.383 +      break;
   1.384 +    }
   1.385 +  }
   1.386 +}
   1.387 +
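Taken together, do_unloading_parallel() and do_unloading_parallel_postponed() split inline-cache cleaning into two phases: a worker postpones cleaning a call site whose callee nmethod has not yet been stamped with the current unloading clock, since another worker may still be processing that callee; once every worker has finished the first pass, all callees carry the current stamp and the postponed sites can be cleaned safely. The sketch below models that protocol with purely illustrative types and a sequential driver (presumably the real driver is the GC's parallel worker framework).

// Standalone model of the two-phase protocol; none of these names are HotSpot's.
#include <cassert>
#include <cstddef>

struct FakeNmethod {
  unsigned char clock;            // per-nmethod stamp, see the clock model above
  FakeNmethod*  callee;           // the single call site this model gives each method
  bool          ic_dirty;         // "inline cache still points at a dead callee"
  bool          postponed;        // cleaning deferred to phase two
};

// Phase one: stamp the nmethod, then either clean its call site now or postpone
// it when the callee has not been processed in this cycle yet.
static bool do_unloading_parallel(FakeNmethod& nm, unsigned char epoch) {
  nm.clock = epoch;
  if (nm.callee != NULL && nm.callee->clock != epoch) {
    nm.postponed = true;          // callee may still be processed concurrently
    return true;
  }
  nm.ic_dirty = false;            // safe to clean immediately
  return false;
}

// Phase two: runs after all workers finished phase one, so every callee is stamped.
static void do_unloading_parallel_postponed(FakeNmethod& nm) {
  if (nm.postponed) {
    nm.ic_dirty = false;
    nm.postponed = false;
  }
}

int main() {
  FakeNmethod a = { 0, NULL, true, false };
  FakeNmethod b = { 0, NULL, true, false };
  a.callee = &b;
  b.callee = &a;

  unsigned char epoch = 1;        // a new unloading cycle
  bool postponed = false;
  postponed |= do_unloading_parallel(a, epoch);   // b not stamped yet -> postpone
  postponed |= do_unloading_parallel(b, epoch);   // a already stamped -> clean now
  if (postponed) {
    do_unloading_parallel_postponed(a);
    do_unloading_parallel_postponed(b);
  }
  assert(!a.ic_dirty && !b.ic_dirty && !a.postponed);
  return 0;
}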
   1.388  #ifdef ASSERT
   1.389  
   1.390  class CheckClass : AllStatic {
   1.391 @@ -1942,7 +2215,7 @@
   1.392      assert(cur != NULL, "not NULL-terminated");
   1.393      nmethod* next = cur->_oops_do_mark_link;
   1.394      cur->_oops_do_mark_link = NULL;
   1.395 -    cur->fix_oop_relocations();
   1.396 +    cur->verify_oop_relocations();
   1.397      NOT_PRODUCT(if (TraceScavenge)  cur->print_on(tty, "oops_do, unmark"));
   1.398      cur = next;
   1.399    }
   1.400 @@ -2484,6 +2757,10 @@
   1.401  };
   1.402  
   1.403  void nmethod::verify_scavenge_root_oops() {
   1.404 +  if (UseG1GC) {
   1.405 +    return;
   1.406 +  }
   1.407 +
   1.408    if (!on_scavenge_root_list()) {
   1.409      // Actually look inside, to verify the claim that it's clean.
   1.410      DebugScavengeRoot debug_scavenge_root(this);
