1.1 --- a/src/share/vm/code/compiledIC.cpp Fri Aug 31 16:39:35 2012 -0700 1.2 +++ b/src/share/vm/code/compiledIC.cpp Sat Sep 01 13:25:18 2012 -0400 1.3 @@ -31,8 +31,9 @@ 1.4 #include "code/vtableStubs.hpp" 1.5 #include "interpreter/interpreter.hpp" 1.6 #include "interpreter/linkResolver.hpp" 1.7 +#include "memory/metadataFactory.hpp" 1.8 #include "memory/oopFactory.hpp" 1.9 -#include "oops/methodOop.hpp" 1.10 +#include "oops/method.hpp" 1.11 #include "oops/oop.inline.hpp" 1.12 #include "oops/symbol.hpp" 1.13 #include "runtime/icache.hpp" 1.14 @@ -44,61 +45,79 @@ 1.15 // Every time a compiled IC is changed or its type is being accessed, 1.16 // either the CompiledIC_lock must be set or we must be at a safe point. 1.17 1.18 + 1.19 +// Release the CompiledICHolder* associated with this call site if there is one. 1.20 +void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) { 1.21 + // This call site might have become stale so inspect it carefully. 1.22 + NativeCall* call = nativeCall_at(call_site->addr()); 1.23 + if (is_icholder_entry(call->destination())) { 1.24 + NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value()); 1.25 + InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data()); 1.26 + } 1.27 +} 1.28 + 1.29 + 1.30 +bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) { 1.31 + // This call site might have become stale so inspect it carefully. 1.32 + NativeCall* call = nativeCall_at(call_site->addr()); 1.33 + return is_icholder_entry(call->destination()); 1.34 +} 1.35 + 1.36 + 1.37 //----------------------------------------------------------------------------- 1.38 // Low-level access to an inline cache. Private, since they might not be 1.39 // MT-safe to use. 
1.40 1.41 -void CompiledIC::set_cached_oop(oop cache) { 1.42 +void* CompiledIC::cached_value() const { 1.43 assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), ""); 1.44 - assert (!is_optimized(), "an optimized virtual call does not have a cached oop"); 1.45 - assert (cache == NULL || cache != badOop, "invalid oop"); 1.46 + assert (!is_optimized(), "an optimized virtual call does not have a cached metadata"); 1.47 + 1.48 + if (!is_in_transition_state()) { 1.49 + void* data = (void*)_value->data(); 1.50 + // If we let the metadata value here be initialized to zero... 1.51 + assert(data != NULL || Universe::non_oop_word() == NULL, 1.52 + "no raw nulls in CompiledIC metadatas, because of patching races"); 1.53 + return (data == (void*)Universe::non_oop_word()) ? NULL : data; 1.54 + } else { 1.55 + return InlineCacheBuffer::cached_value_for((CompiledIC *)this); 1.56 + } 1.57 +} 1.58 + 1.59 + 1.60 +void CompiledIC::internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder) { 1.61 + assert(entry_point != NULL, "must set legal entry point"); 1.62 + assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), ""); 1.63 + assert (!is_optimized() || cache == NULL, "an optimized virtual call does not have a cached metadata"); 1.64 + assert (cache == NULL || cache != (Metadata*)badOopVal, "invalid metadata"); 1.65 + 1.66 + assert(!is_icholder || is_icholder_entry(entry_point), "must be"); 1.67 + 1.68 + // Don't use ic_destination for this test since that forwards 1.69 + // through ICBuffer instead of returning the actual current state of 1.70 + // the CompiledIC. 1.71 + if (is_icholder_entry(_ic_call->destination())) { 1.72 + // When patching for the ICStub case the cached value isn't 1.73 + // overwritten until the ICStub copied into the CompiledIC during 1.74 + // the next safepoint. 
Make sure that the CompiledICHolder* is 1.75 + // marked for release at this point since it won't be identifiable 1.76 + // once the entry point is overwritten. 1.77 + InlineCacheBuffer::queue_for_release((CompiledICHolder*)_value->data()); 1.78 + } 1.79 1.80 if (TraceCompiledIC) { 1.81 tty->print(" "); 1.82 print_compiled_ic(); 1.83 - tty->print_cr(" changing oop to " INTPTR_FORMAT, (address)cache); 1.84 + tty->print(" changing destination to " INTPTR_FORMAT, entry_point); 1.85 + if (!is_optimized()) { 1.86 + tty->print(" changing cached %s to " INTPTR_FORMAT, is_icholder ? "icholder" : "metadata", (address)cache); 1.87 + } 1.88 + if (is_icstub) { 1.89 + tty->print(" (icstub)"); 1.90 + } 1.91 + tty->cr(); 1.92 } 1.93 1.94 - if (cache == NULL) cache = (oop)Universe::non_oop_word(); 1.95 - 1.96 - *_oop_addr = cache; 1.97 - // fix up the relocations 1.98 - RelocIterator iter = _oops; 1.99 - while (iter.next()) { 1.100 - if (iter.type() == relocInfo::oop_type) { 1.101 - oop_Relocation* r = iter.oop_reloc(); 1.102 - if (r->oop_addr() == _oop_addr) 1.103 - r->fix_oop_relocation(); 1.104 - } 1.105 - } 1.106 - return; 1.107 -} 1.108 - 1.109 - 1.110 -oop CompiledIC::cached_oop() const { 1.111 - assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), ""); 1.112 - assert (!is_optimized(), "an optimized virtual call does not have a cached oop"); 1.113 - 1.114 - if (!is_in_transition_state()) { 1.115 - oop data = *_oop_addr; 1.116 - // If we let the oop value here be initialized to zero... 1.117 - assert(data != NULL || Universe::non_oop_word() == NULL, 1.118 - "no raw nulls in CompiledIC oops, because of patching races"); 1.119 - return (data == (oop)Universe::non_oop_word()) ? 
(oop)NULL : data; 1.120 - } else { 1.121 - return InlineCacheBuffer::cached_oop_for((CompiledIC *)this); 1.122 - } 1.123 -} 1.124 - 1.125 - 1.126 -void CompiledIC::set_ic_destination(address entry_point) { 1.127 - assert(entry_point != NULL, "must set legal entry point"); 1.128 - assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), ""); 1.129 - if (TraceCompiledIC) { 1.130 - tty->print(" "); 1.131 - print_compiled_ic(); 1.132 - tty->print_cr(" changing destination to " INTPTR_FORMAT, entry_point); 1.133 - } 1.134 + { 1.135 MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag); 1.136 #ifdef ASSERT 1.137 CodeBlob* cb = CodeCache::find_blob_unsafe(_ic_call); 1.138 @@ -107,6 +126,25 @@ 1.139 _ic_call->set_destination_mt_safe(entry_point); 1.140 } 1.141 1.142 + if (is_optimized() || is_icstub) { 1.143 + // Optimized call sites don't have a cache value and ICStub call 1.144 + // sites only change the entry point. Changing the value in that 1.145 + // case could lead to MT safety issues. 1.146 + assert(cache == NULL, "must be null"); 1.147 + return; 1.148 + } 1.149 + 1.150 + if (cache == NULL) cache = (void*)Universe::non_oop_word(); 1.151 + 1.152 + _value->set_data((intptr_t)cache); 1.153 +} 1.154 + 1.155 + 1.156 +void CompiledIC::set_ic_destination(ICStub* stub) { 1.157 + internal_set_ic_destination(stub->code_begin(), true, NULL, false); 1.158 +} 1.159 + 1.160 + 1.161 1.162 address CompiledIC::ic_destination() const { 1.163 assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), ""); 1.164 @@ -124,6 +162,11 @@ 1.165 } 1.166 1.167 1.168 +bool CompiledIC::is_icholder_call() const { 1.169 + assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), ""); 1.170 + return !_is_optimized && is_icholder_entry(ic_destination()); 1.171 +} 1.172 + 1.173 // Returns native address of 'call' instruction in inline-cache. Used by 1.174 // the InlineCacheBuffer when it needs to find the stub. 
1.175 address CompiledIC::stub_address() const { 1.176 @@ -140,7 +183,6 @@ 1.177 methodHandle method = call_info->selected_method(); 1.178 bool is_invoke_interface = (bytecode == Bytecodes::_invokeinterface && !call_info->has_vtable_index()); 1.179 assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), ""); 1.180 - assert(method->is_oop(), "cannot be NULL and must be oop"); 1.181 assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic"); 1.182 assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?"); 1.183 1.184 @@ -149,7 +191,7 @@ 1.185 int index = klassItable::compute_itable_index(call_info->resolved_method()()); 1.186 entry = VtableStubs::create_stub(false, index, method()); 1.187 assert(entry != NULL, "entry not computed"); 1.188 - klassOop k = call_info->resolved_method()->method_holder(); 1.189 + Klass* k = call_info->resolved_method()->method_holder(); 1.190 assert(Klass::cast(k)->is_interface(), "sanity check"); 1.191 InlineCacheBuffer::create_transition_stub(this, k, entry); 1.192 } else { 1.193 @@ -180,7 +222,7 @@ 1.194 assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), ""); 1.195 assert(!is_optimized(), "an optimized call cannot be megamorphic"); 1.196 1.197 - // Cannot rely on cached_oop. It is either an interface or a method. 1.198 + // Cannot rely on cached_value. It is either an interface or a method. 
1.199 return VtableStubs::is_entry_point(ic_destination()); 1.200 } 1.201 1.202 @@ -192,24 +234,16 @@ 1.203 // has been cleaned up 1.204 CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination()); 1.205 bool is_monomorphic = (cb != NULL && cb->is_nmethod()); 1.206 - // Check that the cached_oop is a klass for non-optimized monomorphic calls 1.207 + // Check that the cached_value is a klass for non-optimized monomorphic calls 1.208 // This assertion is invalid for compiler1: a call that does not look optimized (no static stub) can be used 1.209 - // for calling directly to vep without using the inline cache (i.e., cached_oop == NULL) 1.210 + // for calling directly to vep without using the inline cache (i.e., cached_value == NULL) 1.211 #ifdef ASSERT 1.212 -#ifdef TIERED 1.213 CodeBlob* caller = CodeCache::find_blob_unsafe(instruction_address()); 1.214 bool is_c1_method = caller->is_compiled_by_c1(); 1.215 -#else 1.216 -#ifdef COMPILER1 1.217 - bool is_c1_method = true; 1.218 -#else 1.219 - bool is_c1_method = false; 1.220 -#endif // COMPILER1 1.221 -#endif // TIERED 1.222 assert( is_c1_method || 1.223 !is_monomorphic || 1.224 is_optimized() || 1.225 - (cached_oop() != NULL && cached_oop()->is_klass()), "sanity check"); 1.226 + (cached_metadata() != NULL && cached_metadata()->is_klass()), "sanity check"); 1.227 #endif // ASSERT 1.228 return is_monomorphic; 1.229 } 1.230 @@ -226,7 +260,7 @@ 1.231 // is to the interpreter. 
1.232 CodeBlob* cb = CodeCache::find_blob_unsafe(ic_destination()); 1.233 is_call_to_interpreted = (cb != NULL && cb->is_adapter_blob()); 1.234 - assert(!is_call_to_interpreted || (cached_oop() != NULL && cached_oop()->is_compiledICHolder()), "sanity check"); 1.235 + assert(!is_call_to_interpreted || (is_icholder_call() && cached_icholder() != NULL), "sanity check"); 1.236 } else { 1.237 // Check if we are calling into our own codeblob (i.e., to a stub) 1.238 CodeBlob* cb = CodeCache::find_blob(_ic_call->instruction_address()); 1.239 @@ -257,19 +291,22 @@ 1.240 entry = SharedRuntime::get_resolve_virtual_call_stub(); 1.241 } 1.242 1.243 - // A zombie transition will always be safe, since the oop has already been set to NULL, so 1.244 + // A zombie transition will always be safe, since the metadata has already been set to NULL, so 1.245 // we only need to patch the destination 1.246 bool safe_transition = is_optimized() || SafepointSynchronize::is_at_safepoint(); 1.247 1.248 if (safe_transition) { 1.249 - if (!is_optimized()) set_cached_oop(NULL); 1.250 // Kill any leftover stub we might have too 1.251 if (is_in_transition_state()) { 1.252 ICStub* old_stub = ICStub_from_destination_address(stub_address()); 1.253 old_stub->clear(); 1.254 } 1.255 + if (is_optimized()) { 1.256 set_ic_destination(entry); 1.257 } else { 1.258 + set_ic_destination_and_value(entry, (void*)NULL); 1.259 + } 1.260 + } else { 1.261 // Unsafe transition - create stub. 
1.262 InlineCacheBuffer::create_transition_stub(this, NULL, entry); 1.263 } 1.264 @@ -289,12 +326,12 @@ 1.265 address dest = ic_destination(); 1.266 is_clean = dest == SharedRuntime::get_resolve_opt_virtual_call_stub() || 1.267 dest == SharedRuntime::get_resolve_virtual_call_stub(); 1.268 - assert(!is_clean || is_optimized() || cached_oop() == NULL, "sanity check"); 1.269 + assert(!is_clean || is_optimized() || cached_value() == NULL, "sanity check"); 1.270 return is_clean; 1.271 } 1.272 1.273 1.274 -void CompiledIC::set_to_monomorphic(const CompiledICInfo& info) { 1.275 +void CompiledIC::set_to_monomorphic(CompiledICInfo& info) { 1.276 assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), ""); 1.277 // Updating a cache to the wrong entry can cause bugs that are very hard 1.278 // to track down - if cache entry gets invalid - we just clean it. In 1.279 @@ -309,7 +346,7 @@ 1.280 // transitions are mt_safe 1.281 1.282 Thread *thread = Thread::current(); 1.283 - if (info._to_interpreter) { 1.284 + if (info.to_interpreter()) { 1.285 // Call to interpreter 1.286 if (info.is_optimized() && is_optimized()) { 1.287 assert(is_clean(), "unsafe IC path"); 1.288 @@ -318,9 +355,9 @@ 1.289 // (either because of CHA or the static target is final) 1.290 // At code generation time, this call has been emitted as static call 1.291 // Call via stub 1.292 - assert(info.cached_oop().not_null() && info.cached_oop()->is_method(), "sanity check"); 1.293 + assert(info.cached_metadata() != NULL && info.cached_metadata()->is_method(), "sanity check"); 1.294 CompiledStaticCall* csc = compiledStaticCall_at(instruction_address()); 1.295 - methodHandle method (thread, (methodOop)info.cached_oop()()); 1.296 + methodHandle method (thread, (Method*)info.cached_metadata()); 1.297 csc->set_to_interpreted(method, info.entry()); 1.298 if (TraceICs) { 1.299 ResourceMark rm(thread); 1.300 @@ -330,17 +367,15 @@ 1.301 } 1.302 } else { 1.303 // Call via method-klass-holder 1.304 - 
assert(info.cached_oop().not_null(), "must be set"); 1.305 - InlineCacheBuffer::create_transition_stub(this, info.cached_oop()(), info.entry()); 1.306 - 1.307 + InlineCacheBuffer::create_transition_stub(this, info.claim_cached_icholder(), info.entry()); 1.308 if (TraceICs) { 1.309 ResourceMark rm(thread); 1.310 - tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to interpreter via mkh", instruction_address()); 1.311 + tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to interpreter via icholder ", instruction_address()); 1.312 } 1.313 } 1.314 } else { 1.315 // Call to compiled code 1.316 - bool static_bound = info.is_optimized() || (info.cached_oop().is_null()); 1.317 + bool static_bound = info.is_optimized() || (info.cached_metadata() == NULL); 1.318 #ifdef ASSERT 1.319 CodeBlob* cb = CodeCache::find_blob_unsafe(info.entry()); 1.320 assert (cb->is_nmethod(), "must be compiled!"); 1.321 @@ -352,18 +387,21 @@ 1.322 (!is_in_transition_state() && (info.is_optimized() || static_bound || is_clean())); 1.323 1.324 if (!safe) { 1.325 - InlineCacheBuffer::create_transition_stub(this, info.cached_oop()(), info.entry()); 1.326 + InlineCacheBuffer::create_transition_stub(this, info.cached_metadata(), info.entry()); 1.327 } else { 1.328 + if (is_optimized()) { 1.329 set_ic_destination(info.entry()); 1.330 - if (!is_optimized()) set_cached_oop(info.cached_oop()()); 1.331 + } else { 1.332 + set_ic_destination_and_value(info.entry(), info.cached_metadata()); 1.333 + } 1.334 } 1.335 1.336 if (TraceICs) { 1.337 ResourceMark rm(thread); 1.338 - assert(info.cached_oop() == NULL || info.cached_oop()()->is_klass(), "must be"); 1.339 + assert(info.cached_metadata() == NULL || info.cached_metadata()->is_klass(), "must be"); 1.340 tty->print_cr ("IC@" INTPTR_FORMAT ": monomorphic to compiled (rcvr klass) %s: %s", 1.341 instruction_address(), 1.342 - ((klassOop)info.cached_oop()())->print_value_string(), 1.343 + ((Klass*)info.cached_metadata())->print_value_string(), 1.344 (safe) ? 
"" : "via stub"); 1.345 } 1.346 } 1.347 @@ -386,8 +424,6 @@ 1.348 bool static_bound, 1.349 CompiledICInfo& info, 1.350 TRAPS) { 1.351 - info._is_optimized = is_optimized; 1.352 - 1.353 nmethod* method_code = method->code(); 1.354 address entry = NULL; 1.355 if (method_code != NULL) { 1.356 @@ -400,18 +436,12 @@ 1.357 } 1.358 if (entry != NULL) { 1.359 // Call to compiled code 1.360 - info._entry = entry; 1.361 - if (static_bound || is_optimized) { 1.362 - info._cached_oop = Handle(THREAD, (oop)NULL); 1.363 - } else { 1.364 - info._cached_oop = receiver_klass; 1.365 - } 1.366 - info._to_interpreter = false; 1.367 + info.set_compiled_entry(entry, (static_bound || is_optimized) ? NULL : receiver_klass(), is_optimized); 1.368 } else { 1.369 // Note: the following problem exists with Compiler1: 1.370 // - at compile time we may or may not know if the destination is final 1.371 // - if we know that the destination is final, we will emit an optimized 1.372 - // virtual call (no inline cache), and need a methodOop to make a call 1.373 + // virtual call (no inline cache), and need a Method* to make a call 1.374 // to the interpreter 1.375 // - if we do not know if the destination is final, we emit a standard 1.376 // virtual call, and use CompiledICHolder to call interpreted code 1.377 @@ -422,7 +452,6 @@ 1.378 // it look vanilla but is optimized. Code in is_call_to_interpreted 1.379 // is aware of this and weakens its asserts. 
1.380 1.381 - info._to_interpreter = true; 1.382 // static_bound should imply is_optimized -- otherwise we have a 1.383 // performance bug (statically-bindable method is called via 1.384 // dynamically-dispatched call note: the reverse implication isn't 1.385 @@ -443,38 +472,46 @@ 1.386 #endif // COMPILER2 1.387 if (is_optimized) { 1.388 // Use stub entry 1.389 - info._entry = method()->get_c2i_entry(); 1.390 - info._cached_oop = method; 1.391 + info.set_interpreter_entry(method()->get_c2i_entry(), method()); 1.392 } else { 1.393 - // Use mkh entry 1.394 - oop holder = oopFactory::new_compiledICHolder(method, receiver_klass, CHECK); 1.395 - info._cached_oop = Handle(THREAD, holder); 1.396 - info._entry = method()->get_c2i_unverified_entry(); 1.397 + // Use icholder entry 1.398 + CompiledICHolder* holder = new CompiledICHolder(method(), receiver_klass()); 1.399 + info.set_icholder_entry(method()->get_c2i_unverified_entry(), holder); 1.400 } 1.401 } 1.402 + assert(info.is_optimized() == is_optimized, "must agree"); 1.403 } 1.404 1.405 1.406 -inline static RelocIterator parse_ic(nmethod* nm, address ic_call, oop* &_oop_addr, bool *is_optimized) { 1.407 - address first_oop = NULL; 1.408 - // Mergers please note: Sun SC5.x CC insists on an lvalue for a reference parameter. 
1.409 - nmethod* tmp_nm = nm; 1.410 - return virtual_call_Relocation::parse_ic(tmp_nm, ic_call, first_oop, _oop_addr, is_optimized); 1.411 +bool CompiledIC::is_icholder_entry(address entry) { 1.412 + CodeBlob* cb = CodeCache::find_blob_unsafe(entry); 1.413 + return (cb != NULL && cb->is_adapter_blob()); 1.414 } 1.415 1.416 -CompiledIC::CompiledIC(NativeCall* ic_call) 1.417 - : _ic_call(ic_call), 1.418 - _oops(parse_ic(NULL, ic_call->instruction_address(), _oop_addr, &_is_optimized)) 1.419 + 1.420 +CompiledIC::CompiledIC(nmethod* nm, NativeCall* call) 1.421 + : _ic_call(call) 1.422 { 1.423 + address ic_call = call->instruction_address(); 1.424 + 1.425 + assert(ic_call != NULL, "ic_call address must be set"); 1.426 + assert(nm != NULL, "must pass nmethod"); 1.427 + assert(nm->contains(ic_call), "must be in nmethod"); 1.428 + 1.429 + // search for the ic_call at the given address 1.430 + RelocIterator iter(nm, ic_call, ic_call+1); 1.431 + bool ret = iter.next(); 1.432 + assert(ret == true, "relocInfo must exist at this address"); 1.433 + assert(iter.addr() == ic_call, "must find ic_call"); 1.434 + if (iter.type() == relocInfo::virtual_call_type) { 1.435 + virtual_call_Relocation* r = iter.virtual_call_reloc(); 1.436 + _is_optimized = false; 1.437 + _value = nativeMovConstReg_at(r->cached_value()); 1.438 + } else { 1.439 + assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call"); 1.440 + _is_optimized = true; 1.441 + _value = NULL; 1.442 } 1.443 - 1.444 - 1.445 -CompiledIC::CompiledIC(Relocation* ic_reloc) 1.446 - : _ic_call(nativeCall_at(ic_reloc->addr())), 1.447 - _oops(parse_ic(ic_reloc->code(), ic_reloc->addr(), _oop_addr, &_is_optimized)) 1.448 -{ 1.449 - assert(ic_reloc->type() == relocInfo::virtual_call_type || 1.450 - ic_reloc->type() == relocInfo::opt_virtual_call_type, "wrong reloc. 
info"); 1.451 } 1.452 1.453 1.454 @@ -639,8 +676,8 @@ 1.455 1.456 1.457 void CompiledIC::print_compiled_ic() { 1.458 - tty->print("Inline cache at " INTPTR_FORMAT ", calling %s " INTPTR_FORMAT, 1.459 - instruction_address(), is_call_to_interpreted() ? "interpreted " : "", ic_destination()); 1.460 + tty->print("Inline cache at " INTPTR_FORMAT ", calling %s " INTPTR_FORMAT " cached_value " INTPTR_FORMAT, 1.461 + instruction_address(), is_call_to_interpreted() ? "interpreted " : "", ic_destination(), is_optimized() ? NULL : cached_value()); 1.462 } 1.463 1.464