Tue, 24 Jun 2014 08:12:30 +0200
8035328: closed/compiler/6595044/Main.java failed with timeout
Summary: Patch call sites of non-entrant methods to avoid re-resolving if method is still executed.
Reviewed-by: kvn
src/share/vm/runtime/sharedRuntime.cpp
--- a/src/share/vm/runtime/sharedRuntime.cpp	Tue Sep 16 11:03:19 2014 +0200
+++ b/src/share/vm/runtime/sharedRuntime.cpp	Tue Jun 24 08:12:30 2014 +0200
@@ -1209,10 +1209,7 @@
          (!is_virtual && invoke_code == Bytecodes::_invokedynamic) ||
          ( is_virtual && invoke_code != Bytecodes::_invokestatic ), "inconsistent bytecode");
 
-  // We do not patch the call site if the caller nmethod has been made non-entrant.
-  if (!caller_nm->is_in_use()) {
-    return callee_method;
-  }
+  assert(caller_nm->is_alive(), "It should be alive");
 
 #ifndef PRODUCT
   // tracing/debugging/statistics
@@ -1282,13 +1279,11 @@
 
   // Now that we are ready to patch if the Method* was redefined then
   // don't update call site and let the caller retry.
-  // Don't update call site if caller nmethod has been made non-entrant
-  // as it is a waste of time.
   // Don't update call site if callee nmethod was unloaded or deoptimized.
   // Don't update call site if callee nmethod was replaced by an other nmethod
   // which may happen when multiply alive nmethod (tiered compilation)
   // will be supported.
-  if (!callee_method->is_old() && caller_nm->is_in_use() &&
+  if (!callee_method->is_old() &&
       (callee_nm == NULL || callee_nm->is_in_use() && (callee_method->code() == callee_nm))) {
 #ifdef ASSERT
     // We must not try to patch to jump to an already unloaded method.
@@ -1489,14 +1484,12 @@
   // out of scope.
   JvmtiDynamicCodeEventCollector event_collector;
 
-  // Update inline cache to megamorphic. Skip update if caller has been
-  // made non-entrant or we are called from interpreted.
+  // Update inline cache to megamorphic. Skip update if we are called from interpreted.
   { MutexLocker ml_patch (CompiledIC_lock);
     RegisterMap reg_map(thread, false);
     frame caller_frame = thread->last_frame().sender(&reg_map);
     CodeBlob* cb = caller_frame.cb();
-    if (cb->is_nmethod() && ((nmethod*)cb)->is_in_use()) {
-      // Not a non-entrant nmethod, so find inline_cache
+    if (cb->is_nmethod()) {
       CompiledIC* inline_cache = CompiledIC_before(((nmethod*)cb), caller_frame.pc());
       bool should_be_mono = false;
       if (inline_cache->is_optimized()) {
@@ -1639,19 +1632,13 @@
   // resolve is only done once.
 
   MutexLocker ml(CompiledIC_lock);
-  //
-  // We do not patch the call site if the nmethod has been made non-entrant
-  // as it is a waste of time
-  //
-  if (caller_nm->is_in_use()) {
-    if (is_static_call) {
-      CompiledStaticCall* ssc= compiledStaticCall_at(call_addr);
-      ssc->set_to_clean();
-    } else {
-      // compiled, dispatched call (which used to call an interpreted method)
-      CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
-      inline_cache->set_to_clean();
-    }
+  if (is_static_call) {
+    CompiledStaticCall* ssc= compiledStaticCall_at(call_addr);
+    ssc->set_to_clean();
+  } else {
+    // compiled, dispatched call (which used to call an interpreted method)
+    CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
+    inline_cache->set_to_clean();
+  }
 }