1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 1.2 +++ b/src/share/vm/code/dependencies.cpp Wed Apr 27 01:25:04 2016 +0800 1.3 @@ -0,0 +1,1723 @@ 1.4 +/* 1.5 + * Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved. 1.6 + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 1.7 + * 1.8 + * This code is free software; you can redistribute it and/or modify it 1.9 + * under the terms of the GNU General Public License version 2 only, as 1.10 + * published by the Free Software Foundation. 1.11 + * 1.12 + * This code is distributed in the hope that it will be useful, but WITHOUT 1.13 + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 1.14 + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 1.15 + * version 2 for more details (a copy is included in the LICENSE file that 1.16 + * accompanied this code). 1.17 + * 1.18 + * You should have received a copy of the GNU General Public License version 1.19 + * 2 along with this work; if not, write to the Free Software Foundation, 1.20 + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 1.21 + * 1.22 + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 1.23 + * or visit www.oracle.com if you need additional information or have any 1.24 + * questions. 
 *
 */

#include "precompiled.hpp"
#include "ci/ciArrayKlass.hpp"
#include "ci/ciEnv.hpp"
#include "ci/ciKlass.hpp"
#include "ci/ciMethod.hpp"
#include "code/dependencies.hpp"
#include "compiler/compileLog.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/handles.hpp"
#include "runtime/handles.inline.hpp"
#include "utilities/copy.hpp"


#ifdef ASSERT
// Debug-only guard used before touching raw (unhandlized) oops/metadata.
// A Java thread qualifies only while in state _thread_in_vm; any other
// thread is assumed safe (see the inline note below).
static bool must_be_in_vm() {
  Thread* thread = Thread::current();
  if (thread->is_Java_thread())
    return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
  else
    return true;  //something like this: thread->is_VM_thread();
}
#endif //ASSERT

// Set up the per-DepType buckets in the compilation's arena.
// Called once per ciEnv; also resets the encoded-content fields.
void Dependencies::initialize(ciEnv* env) {
  Arena* arena = env->arena();
  _oop_recorder = env->oop_recorder();
  _log = env->log();
  // _dep_seen is used by note_dep_seen to cheaply pre-filter duplicate
  // assertions before the buckets are scanned.
  _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
  DEBUG_ONLY(_deps[end_marker] = NULL);
  for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
    _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
  }
  _content_bytes = NULL;
  _size_in_bytes = (size_t)-1;

  assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
}

// Record an evol_method dependency on m.
void Dependencies::assert_evol_method(ciMethod* m) {
  assert_common_1(evol_method, m);
}

// Record a leaf_type dependency on ctxk (array types reduce to their
// element type).
void Dependencies::assert_leaf_type(ciKlass* ctxk) {
  if (ctxk->is_array_klass()) {
    // As a special case, support this assertion on an array type,
    // which reduces to an assertion on its element type.
    // Note that this cannot be done with assertions that
    // relate to concreteness or abstractness.
    ciType* elemt = ctxk->as_array_klass()->base_element_type();
    if (!elemt->is_instance_klass())  return;  // Ex:  int[][]
    ctxk = elemt->as_instance_klass();
    //if (ctxk->is_final())  return;  // Ex:  String[][]
  }
  check_ctxk(ctxk);
  assert_common_1(leaf_type, ctxk);
}

// Record that abstract ctxk has conck as its unique concrete subtype.
void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
  check_ctxk_abstract(ctxk);
  assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
}

// Record that abstract ctxk has no concrete subtype.
void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
  check_ctxk_abstract(ctxk);
  assert_common_1(abstract_with_no_concrete_subtype, ctxk);
}

// Record that concrete ctxk has no concrete subtype below itself.
void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
  check_ctxk_concrete(ctxk);
  assert_common_1(concrete_with_no_concrete_subtype, ctxk);
}

// Record that uniqm is the unique concrete method in context ctxk.
void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
  check_ctxk(ctxk);
  assert_common_2(unique_concrete_method, ctxk, uniqm);
}

// Record that k1 and k2 are the exclusive concrete subtypes of ctxk.
void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
  check_ctxk(ctxk);
  assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
}

// Record that m1 and m2 are the exclusive concrete methods under ctxk.
void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
  check_ctxk(ctxk);
  assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
}

// Record that ctxk has no finalizable subclasses.
void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
  check_ctxk(ctxk);
  assert_common_1(no_finalizable_subclasses, ctxk);
}

// Record that call_site's target is method_handle (implicit context is
// the call site's klass).
void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
  check_ctxk(call_site->klass());
  assert_common_2(call_site_target_value, call_site,
                  method_handle);
}

// Helper function.  If we are adding a new dep. under ctxk2,
// try to find an old dep. under a broader* ctxk1.  If there is one,
// either the existing entry already covers the new one, or its context
// slot is widened in place to ctxk2.  Returns true when the caller need
// not append a new entry.
bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
                                    int ctxk_i, ciKlass* ctxk2) {
  ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
  if (ctxk2->is_subtype_of(ctxk1)) {
    return true;  // success, and no need to change
  } else if (ctxk1->is_subtype_of(ctxk2)) {
    // new context class fully subsumes previous one
    deps->at_put(ctxk_i, ctxk2);
    return true;
  } else {
    return false;
  }
}

// Record a one-argument dependency, skipping exact duplicates.
void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
  assert(dep_args(dept) == 1, "sanity");
  log_dependency(dept, x);
  GrowableArray<ciBaseObject*>* deps = _deps[dept];

  // see if the same (or a similar) dep is already recorded
  if (note_dep_seen(dept, x)) {
    assert(deps->find(x) >= 0, "sanity");
  } else {
    deps->append(x);
  }
}

// Record a two-argument dependency.  When the type has an explicit
// context argument (slot 0), a redundant assertion may instead merge
// its context into an existing entry via maybe_merge_ctxk.
void Dependencies::assert_common_2(DepType dept,
                                   ciBaseObject* x0, ciBaseObject* x1) {
  assert(dep_args(dept) == 2, "sanity");
  log_dependency(dept, x0, x1);
  GrowableArray<ciBaseObject*>* deps = _deps[dept];

  // see if the same (or a similar) dep is already recorded
  bool has_ctxk = has_explicit_context_arg(dept);
  if (has_ctxk) {
    assert(dep_context_arg(dept) == 0, "sanity");
    if (note_dep_seen(dept, x1)) {
      // look in this bucket for redundant assertions
      const int stride = 2;
      for (int i = deps->length(); (i -= stride) >= 0; ) {
        ciBaseObject* y1 = deps->at(i+1);
        if (x1 == y1) {  // same subject; check the context
          if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
            return;
          }
        }
      }
    }
  } else {
    assert(dep_implicit_context_arg(dept) == 0, "sanity");
    if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
      // look in this bucket for redundant assertions
      const int stride = 2;
      for (int i = deps->length(); (i -= stride) >= 0; ) {
        ciBaseObject* y0 = deps->at(i+0);
        ciBaseObject* y1 = deps->at(i+1);
        if (x0 == y0 && x1 == y1) {
          return;
        }
      }
    }
  }

  // append the assertion in the correct bucket:
  deps->append(x0);
  deps->append(x1);
}

// Record a three-argument dependency: a context klass plus an unordered
// pair.  The pair is normalized by ident() order (unless one element is
// the context itself) so duplicates given in either order coincide.
void Dependencies::assert_common_3(DepType dept,
                                   ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
  assert(dep_context_arg(dept) == 0, "sanity");
  assert(dep_args(dept) == 3, "sanity");
  log_dependency(dept, ctxk, x, x2);
  GrowableArray<ciBaseObject*>* deps = _deps[dept];

  // try to normalize an unordered pair:
  bool swap = false;
  switch (dept) {
  case abstract_with_exclusive_concrete_subtypes_2:
    swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
    break;
  case exclusive_concrete_methods_2:
    swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
    break;
  }
  if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }

  // see if the same (or a similar) dep is already recorded
  if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
    // look in this bucket for redundant assertions
    const int stride = 3;
    for (int i = deps->length(); (i -= stride) >= 0; ) {
      ciBaseObject* y  = deps->at(i+1);
      ciBaseObject* y2 = deps->at(i+2);
      if (x == y && x2 == y2) {  // same subjects; check the context
        if (maybe_merge_ctxk(deps, i+0, ctxk)) {
          return;
        }
      }
    }
  }
  // append the assertion in the correct bucket:
  deps->append(ctxk);
  deps->append(x);
  deps->append(x2);
}

/// Support for encoding dependencies into an nmethod:

// Copy the encoded dependency bytes into the nmethod's dependencies
// section.  The content is word-aligned (see encode_content_bytes).
void Dependencies::copy_to(nmethod* nm) {
  address beg = nm->dependencies_begin();
  address end = nm->dependencies_end();
  guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
  Copy::disjoint_words((HeapWord*) content_bytes(),
                       (HeapWord*) beg,
                       size_in_bytes() / sizeof(HeapWord));
  assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
}

// Comparator: order dependency tuples lexicographically by the ci
// idents of their narg arguments.
static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
  for (int i = 0; i < narg; i++) {
    int diff = p1[i]->ident() - p2[i]->ident();
    if (diff != 0)  return diff;
  }
  return 0;
}
static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
{ return sort_dep(p1, p2, 1); }
static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
{ return sort_dep(p1, p2, 2); }
static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
{ return sort_dep(p1, p2, 3); }

// Sort every bucket by argument idents so the encoded stream is
// deterministic.
void Dependencies::sort_all_deps() {
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    if (deps->length() <= 1)  continue;
    switch (dep_args(dept)) {
    case 1: deps->sort(sort_dep_arg_1, 1); break;
    case 2: deps->sort(sort_dep_arg_2, 2); break;
    case 3: deps->sort(sort_dep_arg_3, 3); break;
    default: ShouldNotReachHere();
    }
  }
}

// Rough upper bound used only to pre-size the write stream.
size_t Dependencies::estimate_size_in_bytes() {
  size_t est_size = 100;
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    est_size += deps->length()*2;  // tags and argument(s)
  }
  return est_size;
}

// For dep types whose context klass is derivable from the following
// argument, return that derived klass; the encoder can then elide the
// context (encode it as null).  Returns NULL for all other types.
ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
  switch (dept) {
  case abstract_with_exclusive_concrete_subtypes_2:
    return x->as_metadata()->as_klass();
  case unique_concrete_method:
  case exclusive_concrete_methods_2:
    return x->as_metadata()->as_method()->holder();
  }
  return NULL;  // let NULL be NULL
}

// Runtime (Metadata-based) twin of the ci version above; used when
// decoding inside the VM.
Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
  assert(must_be_in_vm(), "raw oops here");
  switch (dept) {
  case abstract_with_exclusive_concrete_subtypes_2:
    assert(x->is_klass(), "sanity");
    return (Klass*) x;
  case unique_concrete_method:
  case exclusive_concrete_methods_2:
    assert(x->is_method(), "sanity");
    return ((Method*)x)->method_holder();
  }
  return NULL;  // let NULL be NULL
}

// Flatten all buckets into the compressed byte stream stored in the
// nmethod: per tuple, one tag byte (DepType, plus a bit marking an
// elided default context) followed by OopRecorder indexes.
void Dependencies::encode_content_bytes() {
  sort_all_deps();

  // cast is safe, no deps can overflow INT_MAX
  CompressedWriteStream bytes((int)estimate_size_in_bytes());

  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    if (deps->length() == 0)  continue;
    int stride = dep_args(dept);
    int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
    assert(stride > 0, "sanity");
    for (int i = 0; i < deps->length(); i += stride) {
      jbyte code_byte = (jbyte)dept;
      int skipj = -1;
      if (ctxkj >= 0 && ctxkj+1 < stride) {
        ciKlass* ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
        ciBaseObject* x = deps->at(i+ctxkj+1);  // following argument
        if (ctxk == ctxk_encoded_as_null(dept, x)) {
          skipj =
            ctxkj;  // we win:  maybe one less oop to keep track of
          code_byte |= default_context_type_bit;
        }
      }
      bytes.write_byte(code_byte);
      for (int j = 0; j < stride; j++) {
        if (j == skipj)  continue;
        ciBaseObject* v = deps->at(i+j);
        int idx;
        if (v->is_object()) {
          idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
        } else {
          ciMetadata* meta = v->as_metadata();
          idx = _oop_recorder->find_index(meta->constant_encoding());
        }
        bytes.write_int(idx);
      }
    }
  }

  // write a sentinel byte to mark the end
  bytes.write_byte(end_marker);

  // round it out to a word boundary
  while (bytes.position() % sizeof(HeapWord) != 0) {
    bytes.write_byte(end_marker);
  }

  // check whether the dept byte encoding really works
  assert((jbyte)default_context_type_bit != 0, "byte overflow");

  _content_bytes = bytes.buffer();
  _size_in_bytes = bytes.position();
}


// Printable name for each DepType, indexed by the type value.
const char* Dependencies::_dep_name[TYPE_LIMIT] = {
  "end_marker",
  "evol_method",
  "leaf_type",
  "abstract_with_unique_concrete_subtype",
  "abstract_with_no_concrete_subtype",
  "concrete_with_no_concrete_subtype",
  "unique_concrete_method",
  "abstract_with_exclusive_concrete_subtypes_2",
  "exclusive_concrete_methods_2",
  "no_finalizable_subclasses",
  "call_site_target_value"
};

// Argument count (encoding stride) for each DepType.
int Dependencies::_dep_args[TYPE_LIMIT] = {
  -1,// end_marker
  1, // evol_method m
  1, // leaf_type ctxk
  2, // abstract_with_unique_concrete_subtype ctxk, k
  1, // abstract_with_no_concrete_subtype ctxk
  1, // concrete_with_no_concrete_subtype ctxk
  2, // unique_concrete_method ctxk, m
  3, // unique_concrete_subtypes_2 ctxk, k1, k2
  3, // unique_concrete_methods_2 ctxk, m1, m2
  1, // no_finalizable_subclasses ctxk
  2  // call_site_target_value call_site, method_handle
};

// Name lookup with range check; returns a placeholder for bad values.
const char* Dependencies::dep_name(Dependencies::DepType dept) {
  if (!dept_in_mask(dept, all_types))  return "?bad-dep?";
  return _dep_name[dept];
}

// Argument-count lookup with range check; -1 for bad values.
int Dependencies::dep_args(Dependencies::DepType dept) {
  if (!dept_in_mask(dept, all_types))  return -1;
  return _dep_args[dept];
}

void Dependencies::check_valid_dependency_type(DepType dept) {
  guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, err_msg("invalid dependency type: %d", (int) dept));
}

// for the sake of the compiler log, print out current dependencies:
void Dependencies::log_all_dependencies() {
  if (log() == NULL)  return;
  ciBaseObject* args[max_arg_count];
  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
    DepType dept = (DepType)deptv;
    GrowableArray<ciBaseObject*>* deps = _deps[dept];
    if (deps->length() == 0)  continue;
    int stride = dep_args(dept);
    for (int i = 0; i < deps->length(); i += stride) {
      for (int j = 0; j < stride; j++) {
        // flush out the identities before printing
        args[j] = deps->at(i+j);
      }
      write_dependency_to(log(), dept, stride, args);
    }
  }
}

// Convert raw DepArguments into ci handles, then delegate to the
// ciBaseObject flavor of write_dependency_to.
void Dependencies::write_dependency_to(CompileLog* log,
                                       DepType dept,
                                       int nargs, DepArgument args[],
                                       Klass* witness) {
  if (log == NULL) {
    return;
  }
  ciEnv* env = ciEnv::current();
  ciBaseObject* ciargs[max_arg_count];
  assert(nargs <= max_arg_count, "oob");
  for (int j = 0; j < nargs; j++) {
    if (args[j].is_oop()) {
      ciargs[j] = env->get_object(args[j].oop_value());
    } else {
      ciargs[j] =
env->get_metadata(args[j].metadata_value()); 1.443 + } 1.444 + } 1.445 + Dependencies::write_dependency_to(log, dept, nargs, ciargs, witness); 1.446 +} 1.447 + 1.448 +void Dependencies::write_dependency_to(CompileLog* log, 1.449 + DepType dept, 1.450 + int nargs, ciBaseObject* args[], 1.451 + Klass* witness) { 1.452 + if (log == NULL) return; 1.453 + assert(nargs <= max_arg_count, "oob"); 1.454 + int argids[max_arg_count]; 1.455 + int ctxkj = dep_context_arg(dept); // -1 if no context arg 1.456 + int j; 1.457 + for (j = 0; j < nargs; j++) { 1.458 + if (args[j]->is_object()) { 1.459 + argids[j] = log->identify(args[j]->as_object()); 1.460 + } else { 1.461 + argids[j] = log->identify(args[j]->as_metadata()); 1.462 + } 1.463 + } 1.464 + if (witness != NULL) { 1.465 + log->begin_elem("dependency_failed"); 1.466 + } else { 1.467 + log->begin_elem("dependency"); 1.468 + } 1.469 + log->print(" type='%s'", dep_name(dept)); 1.470 + if (ctxkj >= 0) { 1.471 + log->print(" ctxk='%d'", argids[ctxkj]); 1.472 + } 1.473 + // write remaining arguments, if any. 
1.474 + for (j = 0; j < nargs; j++) { 1.475 + if (j == ctxkj) continue; // already logged 1.476 + if (j == 1) { 1.477 + log->print( " x='%d'", argids[j]); 1.478 + } else { 1.479 + log->print(" x%d='%d'", j, argids[j]); 1.480 + } 1.481 + } 1.482 + if (witness != NULL) { 1.483 + log->object("witness", witness); 1.484 + log->stamp(); 1.485 + } 1.486 + log->end_elem(); 1.487 +} 1.488 + 1.489 +void Dependencies::write_dependency_to(xmlStream* xtty, 1.490 + DepType dept, 1.491 + int nargs, DepArgument args[], 1.492 + Klass* witness) { 1.493 + if (xtty == NULL) return; 1.494 + ttyLocker ttyl; 1.495 + int ctxkj = dep_context_arg(dept); // -1 if no context arg 1.496 + if (witness != NULL) { 1.497 + xtty->begin_elem("dependency_failed"); 1.498 + } else { 1.499 + xtty->begin_elem("dependency"); 1.500 + } 1.501 + xtty->print(" type='%s'", dep_name(dept)); 1.502 + if (ctxkj >= 0) { 1.503 + xtty->object("ctxk", args[ctxkj].metadata_value()); 1.504 + } 1.505 + // write remaining arguments, if any. 1.506 + for (int j = 0; j < nargs; j++) { 1.507 + if (j == ctxkj) continue; // already logged 1.508 + if (j == 1) { 1.509 + if (args[j].is_oop()) { 1.510 + xtty->object("x", args[j].oop_value()); 1.511 + } else { 1.512 + xtty->object("x", args[j].metadata_value()); 1.513 + } 1.514 + } else { 1.515 + char xn[10]; sprintf(xn, "x%d", j); 1.516 + if (args[j].is_oop()) { 1.517 + xtty->object(xn, args[j].oop_value()); 1.518 + } else { 1.519 + xtty->object(xn, args[j].metadata_value()); 1.520 + } 1.521 + } 1.522 + } 1.523 + if (witness != NULL) { 1.524 + xtty->object("witness", witness); 1.525 + xtty->stamp(); 1.526 + } 1.527 + xtty->end_elem(); 1.528 +} 1.529 + 1.530 +void Dependencies::print_dependency(DepType dept, int nargs, DepArgument args[], 1.531 + Klass* witness) { 1.532 + ResourceMark rm; 1.533 + ttyLocker ttyl; // keep the following output all in one block 1.534 + tty->print_cr("%s of type %s", 1.535 + (witness == NULL)? 
"Dependency": "Failed dependency", 1.536 + dep_name(dept)); 1.537 + // print arguments 1.538 + int ctxkj = dep_context_arg(dept); // -1 if no context arg 1.539 + for (int j = 0; j < nargs; j++) { 1.540 + DepArgument arg = args[j]; 1.541 + bool put_star = false; 1.542 + if (arg.is_null()) continue; 1.543 + const char* what; 1.544 + if (j == ctxkj) { 1.545 + assert(arg.is_metadata(), "must be"); 1.546 + what = "context"; 1.547 + put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value()); 1.548 + } else if (arg.is_method()) { 1.549 + what = "method "; 1.550 + put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value()); 1.551 + } else if (arg.is_klass()) { 1.552 + what = "class "; 1.553 + } else { 1.554 + what = "object "; 1.555 + } 1.556 + tty->print(" %s = %s", what, (put_star? "*": "")); 1.557 + if (arg.is_klass()) 1.558 + tty->print("%s", ((Klass*)arg.metadata_value())->external_name()); 1.559 + else if (arg.is_method()) 1.560 + ((Method*)arg.metadata_value())->print_value(); 1.561 + else 1.562 + ShouldNotReachHere(); // Provide impl for this type. 1.563 + tty->cr(); 1.564 + } 1.565 + if (witness != NULL) { 1.566 + bool put_star = !Dependencies::is_concrete_klass(witness); 1.567 + tty->print_cr(" witness = %s%s", 1.568 + (put_star? 
"*": ""), 1.569 + witness->external_name()); 1.570 + } 1.571 +} 1.572 + 1.573 +void Dependencies::DepStream::log_dependency(Klass* witness) { 1.574 + if (_deps == NULL && xtty == NULL) return; // fast cutout for runtime 1.575 + ResourceMark rm; 1.576 + int nargs = argument_count(); 1.577 + DepArgument args[max_arg_count]; 1.578 + for (int j = 0; j < nargs; j++) { 1.579 + if (type() == call_site_target_value) { 1.580 + args[j] = argument_oop(j); 1.581 + } else { 1.582 + args[j] = argument(j); 1.583 + } 1.584 + } 1.585 + if (_deps != NULL && _deps->log() != NULL) { 1.586 + Dependencies::write_dependency_to(_deps->log(), 1.587 + type(), nargs, args, witness); 1.588 + } else { 1.589 + Dependencies::write_dependency_to(xtty, 1.590 + type(), nargs, args, witness); 1.591 + } 1.592 +} 1.593 + 1.594 +void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose) { 1.595 + int nargs = argument_count(); 1.596 + DepArgument args[max_arg_count]; 1.597 + for (int j = 0; j < nargs; j++) { 1.598 + args[j] = argument(j); 1.599 + } 1.600 + Dependencies::print_dependency(type(), nargs, args, witness); 1.601 + if (verbose) { 1.602 + if (_code != NULL) { 1.603 + tty->print(" code: "); 1.604 + _code->print_value_on(tty); 1.605 + tty->cr(); 1.606 + } 1.607 + } 1.608 +} 1.609 + 1.610 + 1.611 +/// Dependency stream support (decodes dependencies from an nmethod): 1.612 + 1.613 +#ifdef ASSERT 1.614 +void Dependencies::DepStream::initial_asserts(size_t byte_limit) { 1.615 + assert(must_be_in_vm(), "raw oops here"); 1.616 + _byte_limit = byte_limit; 1.617 + _type = (DepType)(end_marker-1); // defeat "already at end" assert 1.618 + assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other"); 1.619 +} 1.620 +#endif //ASSERT 1.621 + 1.622 +bool Dependencies::DepStream::next() { 1.623 + assert(_type != end_marker, "already at end"); 1.624 + if (_bytes.position() == 0 && _code != NULL 1.625 + && _code->dependencies_size() == 0) { 1.626 + // Method has no dependencies at all. 
1.627 + return false; 1.628 + } 1.629 + int code_byte = (_bytes.read_byte() & 0xFF); 1.630 + if (code_byte == end_marker) { 1.631 + DEBUG_ONLY(_type = end_marker); 1.632 + return false; 1.633 + } else { 1.634 + int ctxk_bit = (code_byte & Dependencies::default_context_type_bit); 1.635 + code_byte -= ctxk_bit; 1.636 + DepType dept = (DepType)code_byte; 1.637 + _type = dept; 1.638 + Dependencies::check_valid_dependency_type(dept); 1.639 + int stride = _dep_args[dept]; 1.640 + assert(stride == dep_args(dept), "sanity"); 1.641 + int skipj = -1; 1.642 + if (ctxk_bit != 0) { 1.643 + skipj = 0; // currently the only context argument is at zero 1.644 + assert(skipj == dep_context_arg(dept), "zero arg always ctxk"); 1.645 + } 1.646 + for (int j = 0; j < stride; j++) { 1.647 + _xi[j] = (j == skipj)? 0: _bytes.read_int(); 1.648 + } 1.649 + DEBUG_ONLY(_xi[stride] = -1); // help detect overruns 1.650 + return true; 1.651 + } 1.652 +} 1.653 + 1.654 +inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) { 1.655 + Metadata* o = NULL; 1.656 + if (_code != NULL) { 1.657 + o = _code->metadata_at(i); 1.658 + } else { 1.659 + o = _deps->oop_recorder()->metadata_at(i); 1.660 + } 1.661 + return o; 1.662 +} 1.663 + 1.664 +inline oop Dependencies::DepStream::recorded_oop_at(int i) { 1.665 + return (_code != NULL) 1.666 + ? 
_code->oop_at(i) 1.667 + : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i)); 1.668 +} 1.669 + 1.670 +Metadata* Dependencies::DepStream::argument(int i) { 1.671 + Metadata* result = recorded_metadata_at(argument_index(i)); 1.672 + 1.673 + if (result == NULL) { // Explicit context argument can be compressed 1.674 + int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg 1.675 + if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) { 1.676 + result = ctxk_encoded_as_null(type(), argument(ctxkj+1)); 1.677 + } 1.678 + } 1.679 + 1.680 + assert(result == NULL || result->is_klass() || result->is_method(), "must be"); 1.681 + return result; 1.682 +} 1.683 + 1.684 +oop Dependencies::DepStream::argument_oop(int i) { 1.685 + oop result = recorded_oop_at(argument_index(i)); 1.686 + assert(result == NULL || result->is_oop(), "must be"); 1.687 + return result; 1.688 +} 1.689 + 1.690 +Klass* Dependencies::DepStream::context_type() { 1.691 + assert(must_be_in_vm(), "raw oops here"); 1.692 + 1.693 + // Most dependencies have an explicit context type argument. 1.694 + { 1.695 + int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg 1.696 + if (ctxkj >= 0) { 1.697 + Metadata* k = argument(ctxkj); 1.698 + assert(k != NULL && k->is_klass(), "type check"); 1.699 + return (Klass*)k; 1.700 + } 1.701 + } 1.702 + 1.703 + // Some dependencies are using the klass of the first object 1.704 + // argument as implicit context type (e.g. call_site_target_value). 1.705 + { 1.706 + int ctxkj = dep_implicit_context_arg(type()); 1.707 + if (ctxkj >= 0) { 1.708 + Klass* k = argument_oop(ctxkj)->klass(); 1.709 + assert(k != NULL && k->is_klass(), "type check"); 1.710 + return (Klass*) k; 1.711 + } 1.712 + } 1.713 + 1.714 + // And some dependencies don't have a context type at all, 1.715 + // e.g. evol_method. 
  return NULL;
}

/// Checking dependencies:

// This hierarchy walker inspects subtypes of a given type,
// trying to find a "bad" class which breaks a dependency.
// Such a class is called a "witness" to the broken dependency.
// While searching around, we ignore "participants", which
// are already known to the dependency.
class ClassHierarchyWalker {
 public:
  enum { PARTICIPANT_LIMIT = 3 };

 private:
  // optional method descriptor to check for:
  Symbol* _name;
  Symbol* _signature;

  // special classes which are not allowed to be witnesses:
  Klass*  _participants[PARTICIPANT_LIMIT+1];
  int     _num_participants;

  // cache of method lookups
  Method* _found_methods[PARTICIPANT_LIMIT+1];

  // if non-zero, tells how many witnesses to convert to participants
  int     _record_witnesses;

  // Reset state; the participant list is kept NULL-terminated.
  void initialize(Klass* participant) {
    _record_witnesses = 0;
    _participants[0]  = participant;
    _found_methods[0] = NULL;
    _num_participants = 0;
    if (participant != NULL) {
      // Terminating NULL.
      _participants[1]  = NULL;
      _found_methods[1] = NULL;
      _num_participants = 1;
    }
  }

  // Capture the name/signature of the method being searched for.
  void initialize_from_method(Method* m) {
    assert(m != NULL && m->is_method(), "sanity");
    _name      = m->name();
    _signature = m->signature();
  }

 public:
  // The walker is initialized to recognize certain methods and/or types
  // as friendly participants.
  ClassHierarchyWalker(Klass* participant, Method* m) {
    initialize_from_method(m);
    initialize(participant);
  }
  ClassHierarchyWalker(Method* m) {
    initialize_from_method(m);
    initialize(NULL);
  }
  ClassHierarchyWalker(Klass* participant = NULL) {
    _name      = NULL;
    _signature = NULL;
    initialize(participant);
  }

  // This is common code for two searches:  One for concrete subtypes,
  // the other for concrete method implementations and overrides.
  bool doing_subtype_search() {
    return _name == NULL;
  }

  int num_participants() { return _num_participants; }
  Klass* participant(int n) {
    assert((uint)n <= (uint)_num_participants, "oob");
    return _participants[n];
  }

  // Note:  If n==num_participants, returns NULL.
  Method* found_method(int n) {
    assert((uint)n <= (uint)_num_participants, "oob");
    Method* fm = _found_methods[n];
    assert(n == _num_participants || fm != NULL, "proper usage");
    assert(fm == NULL || fm->method_holder() == _participants[n], "sanity");
    return fm;
  }

#ifdef ASSERT
  // Assert that m is inherited into ctxk, without intervening overrides.
  // (May return true even if this is not true, in corner cases where we punt.)
  bool check_method_context(Klass* ctxk, Method* m) {
    if (m->method_holder() == ctxk)
      return true;  // Quick win.
    if (m->is_private())
      return false; // Quick lose.  Should not happen.
    if (!(m->is_public() || m->is_protected()))
      // The override story is complex when packages get involved.
      return true;  // Must punt the assertion to true.
    Klass* k = ctxk;
    Method* lm = k->lookup_method(m->name(), m->signature());
    if (lm == NULL && k->oop_is_instance()) {
      // It might be an interface method
      lm = ((InstanceKlass*)k)->lookup_method_in_ordered_interfaces(m->name(),
                                                                    m->signature());
    }
    if (lm == m)
      // Method m is inherited into ctxk.
      return true;
    if (lm != NULL) {
      if (!(lm->is_public() || lm->is_protected())) {
        // Method is [package-]private, so the override story is complex.
        return true;  // Must punt the assertion to true.
      }
      if (lm->is_static()) {
        // Static methods don't override non-static so punt
        return true;
      }
      if (   !Dependencies::is_concrete_method(lm)
          && !Dependencies::is_concrete_method(m)
          && lm->method_holder()->is_subtype_of(m->method_holder()))
        // Method m is overridden by lm, but both are non-concrete.
        return true;
    }
    ResourceMark rm;
    tty->print_cr("Dependency method not found in the associated context:");
    tty->print_cr("  context = %s", ctxk->external_name());
    tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
    if (lm != NULL) {
      tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
    }
    return false;
  }
#endif

  // Append a participant, preserving the NULL terminator of both
  // parallel arrays.
  void add_participant(Klass* participant) {
    assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
    int np = _num_participants++;
    _participants[np] = participant;
    _participants[np+1] = NULL;
    _found_methods[np+1] = NULL;
  }

  // Allow the next 'add' witnesses found to be absorbed as participants
  // instead of terminating the search.
  void record_witnesses(int add) {
    if (add > PARTICIPANT_LIMIT)  add = PARTICIPANT_LIMIT;
    assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
    _record_witnesses = add;
  }

  // A witness is a concrete subtype (subtype search) or a class that
  // concretely defines _name/_signature (method search); in the latter
  // case the found method is memoized in _found_methods.
  bool is_witness(Klass* k) {
    if (doing_subtype_search()) {
      return Dependencies::is_concrete_klass(k);
    } else {
      Method* m = InstanceKlass::cast(k)->find_method(_name, _signature);
      if (m == NULL || !Dependencies::is_concrete_method(m))  return false;
      _found_methods[_num_participants] = m;
      // Note:  If add_participant(k) is called,
      // the method m will already be memoized for it.
      return true;
    }
  }

  bool is_participant(Klass* k) {
    if (k == _participants[0]) {
      return true;
    } else if (_num_participants <= 1) {
      return false;
    } else {
      return in_list(k, &_participants[1]);
    }
  }
  // Consume one pending witness-to-participant conversion, if any.
  bool ignore_witness(Klass* witness) {
    if (_record_witnesses == 0) {
      return false;
    } else {
      --_record_witnesses;
      add_participant(witness);
      return true;
    }
  }
  // Linear scan of a NULL-terminated klass list.
  static bool in_list(Klass* x, Klass** list) {
    for (int i = 0; ; i++) {
      Klass* y = list[i];
      if (y == NULL)  break;
      if (y == x)  return true;
    }
    return false;  // not in list
  }

 private:
  // the actual search method:
  Klass* find_witness_anywhere(Klass* context_type,
                               bool participants_hide_witnesses,
                               bool top_level_call = true);
  // the spot-checking version:
  Klass* find_witness_in(KlassDepChange& changes,
                         Klass* context_type,
                         bool participants_hide_witnesses);
 public:
  Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
    assert(doing_subtype_search(), "must set up a subtype search");
    // When looking for unexpected concrete types,
    // do not look beneath expected ones.
    const bool participants_hide_witnesses = true;
    // CX > CC > C' is OK, even if C' is new.
    // CX > { CC,  C' } is not OK if C' is new, and C' is the witness.
    if (changes != NULL) {
      return find_witness_in(*changes, context_type, participants_hide_witnesses);
    } else {
      return find_witness_anywhere(context_type, participants_hide_witnesses);
    }
  }
  Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
    assert(!doing_subtype_search(), "must set up a method definer search");
    // When looking for unexpected concrete methods,
    // look beneath expected ones, to see if there are overrides.
    const bool participants_hide_witnesses = true;
    // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
    if (changes != NULL) {
      return find_witness_in(*changes, context_type, !participants_hide_witnesses);
    } else {
      return find_witness_anywhere(context_type, !participants_hide_witnesses);
    }
  }
};

#ifndef PRODUCT
// Search-statistics counters (debug/diagnostic builds only).
static int deps_find_witness_calls = 0;
static int deps_find_witness_steps = 0;
static int deps_find_witness_recursions = 0;
static int deps_find_witness_singles = 0;
static int deps_find_witness_print = 0; // set to -1 to force a final print
static bool count_find_witness_calls() {
  if (TraceDependencies || LogCompilation) {
    int pcount = deps_find_witness_print + 1;
    bool final_stats      = (pcount == 0);
    bool initial_call     = (pcount == 1);
    bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
    if (pcount < 0)  pcount = 1; // crude overflow protection
    deps_find_witness_print = pcount;
    if (VerifyDependencies && initial_call) {
      tty->print_cr("Warning:  TraceDependencies results may be inflated by VerifyDependencies");
    }
    if (occasional_print || final_stats) {
      // Every now and then dump a little info about dependency searching.
1.959 + if (xtty != NULL) { 1.960 + ttyLocker ttyl; 1.961 + xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'", 1.962 + deps_find_witness_calls, 1.963 + deps_find_witness_steps, 1.964 + deps_find_witness_recursions, 1.965 + deps_find_witness_singles); 1.966 + } 1.967 + if (final_stats || (TraceDependencies && WizardMode)) { 1.968 + ttyLocker ttyl; 1.969 + tty->print_cr("Dependency check (find_witness) " 1.970 + "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d", 1.971 + deps_find_witness_calls, 1.972 + deps_find_witness_steps, 1.973 + (double)deps_find_witness_steps / deps_find_witness_calls, 1.974 + deps_find_witness_recursions, 1.975 + deps_find_witness_singles); 1.976 + } 1.977 + } 1.978 + return true; 1.979 + } 1.980 + return false; 1.981 +} 1.982 +#else 1.983 +#define count_find_witness_calls() (0) 1.984 +#endif //PRODUCT 1.985 + 1.986 + 1.987 +Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes, 1.988 + Klass* context_type, 1.989 + bool participants_hide_witnesses) { 1.990 + assert(changes.involves_context(context_type), "irrelevant dependency"); 1.991 + Klass* new_type = changes.new_type(); 1.992 + 1.993 + (void)count_find_witness_calls(); 1.994 + NOT_PRODUCT(deps_find_witness_singles++); 1.995 + 1.996 + // Current thread must be in VM (not native mode, as in CI): 1.997 + assert(must_be_in_vm(), "raw oops here"); 1.998 + // Must not move the class hierarchy during this check: 1.999 + assert_locked_or_safepoint(Compile_lock); 1.1000 + 1.1001 + int nof_impls = InstanceKlass::cast(context_type)->nof_implementors(); 1.1002 + if (nof_impls > 1) { 1.1003 + // Avoid this case: *I.m > { A.m, C }; B.m > C 1.1004 + // %%% Until this is fixed more systematically, bail out. 1.1005 + // See corresponding comment in find_witness_anywhere. 
1.1006 + return context_type; 1.1007 + } 1.1008 + 1.1009 + assert(!is_participant(new_type), "only old classes are participants"); 1.1010 + if (participants_hide_witnesses) { 1.1011 + // If the new type is a subtype of a participant, we are done. 1.1012 + for (int i = 0; i < num_participants(); i++) { 1.1013 + Klass* part = participant(i); 1.1014 + if (part == NULL) continue; 1.1015 + assert(changes.involves_context(part) == new_type->is_subtype_of(part), 1.1016 + "correct marking of participants, b/c new_type is unique"); 1.1017 + if (changes.involves_context(part)) { 1.1018 + // new guy is protected from this check by previous participant 1.1019 + return NULL; 1.1020 + } 1.1021 + } 1.1022 + } 1.1023 + 1.1024 + if (is_witness(new_type) && 1.1025 + !ignore_witness(new_type)) { 1.1026 + return new_type; 1.1027 + } 1.1028 + 1.1029 + return NULL; 1.1030 +} 1.1031 + 1.1032 + 1.1033 +// Walk hierarchy under a context type, looking for unexpected types. 1.1034 +// Do not report participant types, and recursively walk beneath 1.1035 +// them only if participants_hide_witnesses is false. 1.1036 +// If top_level_call is false, skip testing the context type, 1.1037 +// because the caller has already considered it. 1.1038 +Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type, 1.1039 + bool participants_hide_witnesses, 1.1040 + bool top_level_call) { 1.1041 + // Current thread must be in VM (not native mode, as in CI): 1.1042 + assert(must_be_in_vm(), "raw oops here"); 1.1043 + // Must not move the class hierarchy during this check: 1.1044 + assert_locked_or_safepoint(Compile_lock); 1.1045 + 1.1046 + bool do_counts = count_find_witness_calls(); 1.1047 + 1.1048 + // Check the root of the sub-hierarchy first. 
1.1049 + if (top_level_call) { 1.1050 + if (do_counts) { 1.1051 + NOT_PRODUCT(deps_find_witness_calls++); 1.1052 + NOT_PRODUCT(deps_find_witness_steps++); 1.1053 + } 1.1054 + if (is_participant(context_type)) { 1.1055 + if (participants_hide_witnesses) return NULL; 1.1056 + // else fall through to search loop... 1.1057 + } else if (is_witness(context_type) && !ignore_witness(context_type)) { 1.1058 + // The context is an abstract class or interface, to start with. 1.1059 + return context_type; 1.1060 + } 1.1061 + } 1.1062 + 1.1063 + // Now we must check each implementor and each subclass. 1.1064 + // Use a short worklist to avoid blowing the stack. 1.1065 + // Each worklist entry is a *chain* of subklass siblings to process. 1.1066 + const int CHAINMAX = 100; // >= 1 + InstanceKlass::implementors_limit 1.1067 + Klass* chains[CHAINMAX]; 1.1068 + int chaini = 0; // index into worklist 1.1069 + Klass* chain; // scratch variable 1.1070 +#define ADD_SUBCLASS_CHAIN(k) { \ 1.1071 + assert(chaini < CHAINMAX, "oob"); \ 1.1072 + chain = InstanceKlass::cast(k)->subklass(); \ 1.1073 + if (chain != NULL) chains[chaini++] = chain; } 1.1074 + 1.1075 + // Look for non-abstract subclasses. 1.1076 + // (Note: Interfaces do not have subclasses.) 1.1077 + ADD_SUBCLASS_CHAIN(context_type); 1.1078 + 1.1079 + // If it is an interface, search its direct implementors. 1.1080 + // (Their subclasses are additional indirect implementors. 1.1081 + // See InstanceKlass::add_implementor.) 1.1082 + // (Note: nof_implementors is always zero for non-interfaces.) 1.1083 + int nof_impls = InstanceKlass::cast(context_type)->nof_implementors(); 1.1084 + if (nof_impls > 1) { 1.1085 + // Avoid this case: *I.m > { A.m, C }; B.m > C 1.1086 + // Here, I.m has 2 concrete implementations, but m appears unique 1.1087 + // as A.m, because the search misses B.m when checking C. 1.1088 + // The inherited method B.m was getting missed by the walker 1.1089 + // when interface 'I' was the starting point. 
1.1090 + // %%% Until this is fixed more systematically, bail out. 1.1091 + // (Old CHA had the same limitation.) 1.1092 + return context_type; 1.1093 + } 1.1094 + if (nof_impls > 0) { 1.1095 + Klass* impl = InstanceKlass::cast(context_type)->implementor(); 1.1096 + assert(impl != NULL, "just checking"); 1.1097 + // If impl is the same as the context_type, then more than one 1.1098 + // implementor has seen. No exact info in this case. 1.1099 + if (impl == context_type) { 1.1100 + return context_type; // report an inexact witness to this sad affair 1.1101 + } 1.1102 + if (do_counts) 1.1103 + { NOT_PRODUCT(deps_find_witness_steps++); } 1.1104 + if (is_participant(impl)) { 1.1105 + if (!participants_hide_witnesses) { 1.1106 + ADD_SUBCLASS_CHAIN(impl); 1.1107 + } 1.1108 + } else if (is_witness(impl) && !ignore_witness(impl)) { 1.1109 + return impl; 1.1110 + } else { 1.1111 + ADD_SUBCLASS_CHAIN(impl); 1.1112 + } 1.1113 + } 1.1114 + 1.1115 + // Recursively process each non-trivial sibling chain. 1.1116 + while (chaini > 0) { 1.1117 + Klass* chain = chains[--chaini]; 1.1118 + for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) { 1.1119 + if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); } 1.1120 + if (is_participant(sub)) { 1.1121 + if (participants_hide_witnesses) continue; 1.1122 + // else fall through to process this guy's subclasses 1.1123 + } else if (is_witness(sub) && !ignore_witness(sub)) { 1.1124 + return sub; 1.1125 + } 1.1126 + if (chaini < (VerifyDependencies? 2: CHAINMAX)) { 1.1127 + // Fast path. (Partially disabled if VerifyDependencies.) 1.1128 + ADD_SUBCLASS_CHAIN(sub); 1.1129 + } else { 1.1130 + // Worklist overflow. Do a recursive call. Should be rare. 1.1131 + // The recursive call will have its own worklist, of course. 1.1132 + // (Note that sub has already been tested, so that there is 1.1133 + // no need for the recursive call to re-test. That's handy, 1.1134 + // since the recursive call sees sub as the context_type.) 
1.1135 + if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); } 1.1136 + Klass* witness = find_witness_anywhere(sub, 1.1137 + participants_hide_witnesses, 1.1138 + /*top_level_call=*/ false); 1.1139 + if (witness != NULL) return witness; 1.1140 + } 1.1141 + } 1.1142 + } 1.1143 + 1.1144 + // No witness found. The dependency remains unbroken. 1.1145 + return NULL; 1.1146 +#undef ADD_SUBCLASS_CHAIN 1.1147 +} 1.1148 + 1.1149 + 1.1150 +bool Dependencies::is_concrete_klass(Klass* k) { 1.1151 + if (k->is_abstract()) return false; 1.1152 + // %%% We could treat classes which are concrete but 1.1153 + // have not yet been instantiated as virtually abstract. 1.1154 + // This would require a deoptimization barrier on first instantiation. 1.1155 + //if (k->is_not_instantiated()) return false; 1.1156 + return true; 1.1157 +} 1.1158 + 1.1159 +bool Dependencies::is_concrete_method(Method* m) { 1.1160 + // Statics are irrelevant to virtual call sites. 1.1161 + if (m->is_static()) return false; 1.1162 + 1.1163 + // We could also return false if m does not yet appear to be 1.1164 + // executed, if the VM version supports this distinction also. 1.1165 + // Default methods are considered "concrete" as well. 
1.1166 + return !m->is_abstract() && 1.1167 + !m->is_overpass(); // error functions aren't concrete 1.1168 +} 1.1169 + 1.1170 + 1.1171 +Klass* Dependencies::find_finalizable_subclass(Klass* k) { 1.1172 + if (k->is_interface()) return NULL; 1.1173 + if (k->has_finalizer()) return k; 1.1174 + k = k->subklass(); 1.1175 + while (k != NULL) { 1.1176 + Klass* result = find_finalizable_subclass(k); 1.1177 + if (result != NULL) return result; 1.1178 + k = k->next_sibling(); 1.1179 + } 1.1180 + return NULL; 1.1181 +} 1.1182 + 1.1183 + 1.1184 +bool Dependencies::is_concrete_klass(ciInstanceKlass* k) { 1.1185 + if (k->is_abstract()) return false; 1.1186 + // We could also return false if k does not yet appear to be 1.1187 + // instantiated, if the VM version supports this distinction also. 1.1188 + //if (k->is_not_instantiated()) return false; 1.1189 + return true; 1.1190 +} 1.1191 + 1.1192 +bool Dependencies::is_concrete_method(ciMethod* m) { 1.1193 + // Statics are irrelevant to virtual call sites. 1.1194 + if (m->is_static()) return false; 1.1195 + 1.1196 + // We could also return false if m does not yet appear to be 1.1197 + // executed, if the VM version supports this distinction also. 1.1198 + return !m->is_abstract(); 1.1199 +} 1.1200 + 1.1201 + 1.1202 +bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) { 1.1203 + return k->has_finalizable_subclass(); 1.1204 +} 1.1205 + 1.1206 + 1.1207 +// Any use of the contents (bytecodes) of a method must be 1.1208 +// marked by an "evol_method" dependency, if those contents 1.1209 +// can change. (Note: A method is always dependent on itself.) 1.1210 +Klass* Dependencies::check_evol_method(Method* m) { 1.1211 + assert(must_be_in_vm(), "raw oops here"); 1.1212 + // Did somebody do a JVMTI RedefineClasses while our backs were turned? 1.1213 + // Or is there a now a breakpoint? 1.1214 + // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.) 
1.1215 + if (m->is_old() 1.1216 + || m->number_of_breakpoints() > 0) { 1.1217 + return m->method_holder(); 1.1218 + } else { 1.1219 + return NULL; 1.1220 + } 1.1221 +} 1.1222 + 1.1223 +// This is a strong assertion: It is that the given type 1.1224 +// has no subtypes whatever. It is most useful for 1.1225 +// optimizing checks on reflected types or on array types. 1.1226 +// (Checks on types which are derived from real instances 1.1227 +// can be optimized more strongly than this, because we 1.1228 +// know that the checked type comes from a concrete type, 1.1229 +// and therefore we can disregard abstract types.) 1.1230 +Klass* Dependencies::check_leaf_type(Klass* ctxk) { 1.1231 + assert(must_be_in_vm(), "raw oops here"); 1.1232 + assert_locked_or_safepoint(Compile_lock); 1.1233 + InstanceKlass* ctx = InstanceKlass::cast(ctxk); 1.1234 + Klass* sub = ctx->subklass(); 1.1235 + if (sub != NULL) { 1.1236 + return sub; 1.1237 + } else if (ctx->nof_implementors() != 0) { 1.1238 + // if it is an interface, it must be unimplemented 1.1239 + // (if it is not an interface, nof_implementors is always zero) 1.1240 + Klass* impl = ctx->implementor(); 1.1241 + assert(impl != NULL, "must be set"); 1.1242 + return impl; 1.1243 + } else { 1.1244 + return NULL; 1.1245 + } 1.1246 +} 1.1247 + 1.1248 +// Test the assertion that conck is the only concrete subtype* of ctxk. 1.1249 +// The type conck itself is allowed to have have further concrete subtypes. 1.1250 +// This allows the compiler to narrow occurrences of ctxk by conck, 1.1251 +// when dealing with the types of actual instances. 1.1252 +Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk, 1.1253 + Klass* conck, 1.1254 + KlassDepChange* changes) { 1.1255 + ClassHierarchyWalker wf(conck); 1.1256 + return wf.find_witness_subtype(ctxk, changes); 1.1257 +} 1.1258 + 1.1259 +// If a non-concrete class has no concrete subtypes, it is not (yet) 1.1260 +// instantiatable. 
This can allow the compiler to make some paths go 1.1261 +// dead, if they are gated by a test of the type. 1.1262 +Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk, 1.1263 + KlassDepChange* changes) { 1.1264 + // Find any concrete subtype, with no participants: 1.1265 + ClassHierarchyWalker wf; 1.1266 + return wf.find_witness_subtype(ctxk, changes); 1.1267 +} 1.1268 + 1.1269 + 1.1270 +// If a concrete class has no concrete subtypes, it can always be 1.1271 +// exactly typed. This allows the use of a cheaper type test. 1.1272 +Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk, 1.1273 + KlassDepChange* changes) { 1.1274 + // Find any concrete subtype, with only the ctxk as participant: 1.1275 + ClassHierarchyWalker wf(ctxk); 1.1276 + return wf.find_witness_subtype(ctxk, changes); 1.1277 +} 1.1278 + 1.1279 + 1.1280 +// Find the unique concrete proper subtype of ctxk, or NULL if there 1.1281 +// is more than one concrete proper subtype. If there are no concrete 1.1282 +// proper subtypes, return ctxk itself, whether it is concrete or not. 1.1283 +// The returned subtype is allowed to have have further concrete subtypes. 1.1284 +// That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }. 1.1285 +Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) { 1.1286 + ClassHierarchyWalker wf(ctxk); // Ignore ctxk when walking. 1.1287 + wf.record_witnesses(1); // Record one other witness when walking. 1.1288 + Klass* wit = wf.find_witness_subtype(ctxk); 1.1289 + if (wit != NULL) return NULL; // Too many witnesses. 1.1290 + Klass* conck = wf.participant(0); 1.1291 + if (conck == NULL) { 1.1292 +#ifndef PRODUCT 1.1293 + // Make sure the dependency mechanism will pass this discovery: 1.1294 + if (VerifyDependencies) { 1.1295 + // Turn off dependency tracing while actually testing deps. 
1.1296 + FlagSetting fs(TraceDependencies, false); 1.1297 + if (!Dependencies::is_concrete_klass(ctxk)) { 1.1298 + guarantee(NULL == 1.1299 + (void *)check_abstract_with_no_concrete_subtype(ctxk), 1.1300 + "verify dep."); 1.1301 + } else { 1.1302 + guarantee(NULL == 1.1303 + (void *)check_concrete_with_no_concrete_subtype(ctxk), 1.1304 + "verify dep."); 1.1305 + } 1.1306 + } 1.1307 +#endif //PRODUCT 1.1308 + return ctxk; // Return ctxk as a flag for "no subtypes". 1.1309 + } else { 1.1310 +#ifndef PRODUCT 1.1311 + // Make sure the dependency mechanism will pass this discovery: 1.1312 + if (VerifyDependencies) { 1.1313 + // Turn off dependency tracing while actually testing deps. 1.1314 + FlagSetting fs(TraceDependencies, false); 1.1315 + if (!Dependencies::is_concrete_klass(ctxk)) { 1.1316 + guarantee(NULL == (void *) 1.1317 + check_abstract_with_unique_concrete_subtype(ctxk, conck), 1.1318 + "verify dep."); 1.1319 + } 1.1320 + } 1.1321 +#endif //PRODUCT 1.1322 + return conck; 1.1323 + } 1.1324 +} 1.1325 + 1.1326 +// Test the assertion that the k[12] are the only concrete subtypes of ctxk, 1.1327 +// except possibly for further subtypes of k[12] themselves. 1.1328 +// The context type must be abstract. The types k1 and k2 are themselves 1.1329 +// allowed to have further concrete subtypes. 1.1330 +Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes( 1.1331 + Klass* ctxk, 1.1332 + Klass* k1, 1.1333 + Klass* k2, 1.1334 + KlassDepChange* changes) { 1.1335 + ClassHierarchyWalker wf; 1.1336 + wf.add_participant(k1); 1.1337 + wf.add_participant(k2); 1.1338 + return wf.find_witness_subtype(ctxk, changes); 1.1339 +} 1.1340 + 1.1341 +// Search ctxk for concrete implementations. If there are klen or fewer, 1.1342 +// pack them into the given array and return the number. 1.1343 +// Otherwise, return -1, meaning the given array would overflow. 1.1344 +// (Note that a return of 0 means there are exactly no concrete subtypes.) 
1.1345 +// In this search, if ctxk is concrete, it will be reported alone. 1.1346 +// For any type CC reported, no proper subtypes of CC will be reported. 1.1347 +int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk, 1.1348 + int klen, 1.1349 + Klass* karray[]) { 1.1350 + ClassHierarchyWalker wf; 1.1351 + wf.record_witnesses(klen); 1.1352 + Klass* wit = wf.find_witness_subtype(ctxk); 1.1353 + if (wit != NULL) return -1; // Too many witnesses. 1.1354 + int num = wf.num_participants(); 1.1355 + assert(num <= klen, "oob"); 1.1356 + // Pack the result array with the good news. 1.1357 + for (int i = 0; i < num; i++) 1.1358 + karray[i] = wf.participant(i); 1.1359 +#ifndef PRODUCT 1.1360 + // Make sure the dependency mechanism will pass this discovery: 1.1361 + if (VerifyDependencies) { 1.1362 + // Turn off dependency tracing while actually testing deps. 1.1363 + FlagSetting fs(TraceDependencies, false); 1.1364 + switch (Dependencies::is_concrete_klass(ctxk)? -1: num) { 1.1365 + case -1: // ctxk was itself concrete 1.1366 + guarantee(num == 1 && karray[0] == ctxk, "verify dep."); 1.1367 + break; 1.1368 + case 0: 1.1369 + guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk), 1.1370 + "verify dep."); 1.1371 + break; 1.1372 + case 1: 1.1373 + guarantee(NULL == (void *) 1.1374 + check_abstract_with_unique_concrete_subtype(ctxk, karray[0]), 1.1375 + "verify dep."); 1.1376 + break; 1.1377 + case 2: 1.1378 + guarantee(NULL == (void *) 1.1379 + check_abstract_with_exclusive_concrete_subtypes(ctxk, 1.1380 + karray[0], 1.1381 + karray[1]), 1.1382 + "verify dep."); 1.1383 + break; 1.1384 + default: 1.1385 + ShouldNotReachHere(); // klen > 2 yet supported 1.1386 + } 1.1387 + } 1.1388 +#endif //PRODUCT 1.1389 + return num; 1.1390 +} 1.1391 + 1.1392 +// If a class (or interface) has a unique concrete method uniqm, return NULL. 1.1393 +// Otherwise, return a class that contains an interfering method. 
1.1394 +Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm, 1.1395 + KlassDepChange* changes) { 1.1396 + // Here is a missing optimization: If uniqm->is_final(), 1.1397 + // we don't really need to search beneath it for overrides. 1.1398 + // This is probably not important, since we don't use dependencies 1.1399 + // to track final methods. (They can't be "definalized".) 1.1400 + ClassHierarchyWalker wf(uniqm->method_holder(), uniqm); 1.1401 + return wf.find_witness_definer(ctxk, changes); 1.1402 +} 1.1403 + 1.1404 +// Find the set of all non-abstract methods under ctxk that match m. 1.1405 +// (The method m must be defined or inherited in ctxk.) 1.1406 +// Include m itself in the set, unless it is abstract. 1.1407 +// If this set has exactly one element, return that element. 1.1408 +Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) { 1.1409 + ClassHierarchyWalker wf(m); 1.1410 + assert(wf.check_method_context(ctxk, m), "proper context"); 1.1411 + wf.record_witnesses(1); 1.1412 + Klass* wit = wf.find_witness_definer(ctxk); 1.1413 + if (wit != NULL) return NULL; // Too many witnesses. 1.1414 + Method* fm = wf.found_method(0); // Will be NULL if num_parts == 0. 1.1415 + if (Dependencies::is_concrete_method(m)) { 1.1416 + if (fm == NULL) { 1.1417 + // It turns out that m was always the only implementation. 1.1418 + fm = m; 1.1419 + } else if (fm != m) { 1.1420 + // Two conflicting implementations after all. 1.1421 + // (This can happen if m is inherited into ctxk and fm overrides it.) 
1.1422 + return NULL; 1.1423 + } 1.1424 + } 1.1425 +#ifndef PRODUCT 1.1426 + // Make sure the dependency mechanism will pass this discovery: 1.1427 + if (VerifyDependencies && fm != NULL) { 1.1428 + guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm), 1.1429 + "verify dep."); 1.1430 + } 1.1431 +#endif //PRODUCT 1.1432 + return fm; 1.1433 +} 1.1434 + 1.1435 +Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk, 1.1436 + Method* m1, 1.1437 + Method* m2, 1.1438 + KlassDepChange* changes) { 1.1439 + ClassHierarchyWalker wf(m1); 1.1440 + wf.add_participant(m1->method_holder()); 1.1441 + wf.add_participant(m2->method_holder()); 1.1442 + return wf.find_witness_definer(ctxk, changes); 1.1443 +} 1.1444 + 1.1445 +// Find the set of all non-abstract methods under ctxk that match m[0]. 1.1446 +// (The method m[0] must be defined or inherited in ctxk.) 1.1447 +// Include m itself in the set, unless it is abstract. 1.1448 +// Fill the given array m[0..(mlen-1)] with this set, and return the length. 1.1449 +// (The length may be zero if no concrete methods are found anywhere.) 1.1450 +// If there are too many concrete methods to fit in marray, return -1. 1.1451 +int Dependencies::find_exclusive_concrete_methods(Klass* ctxk, 1.1452 + int mlen, 1.1453 + Method* marray[]) { 1.1454 + Method* m0 = marray[0]; 1.1455 + ClassHierarchyWalker wf(m0); 1.1456 + assert(wf.check_method_context(ctxk, m0), "proper context"); 1.1457 + wf.record_witnesses(mlen); 1.1458 + bool participants_hide_witnesses = true; 1.1459 + Klass* wit = wf.find_witness_definer(ctxk); 1.1460 + if (wit != NULL) return -1; // Too many witnesses. 1.1461 + int num = wf.num_participants(); 1.1462 + assert(num <= mlen, "oob"); 1.1463 + // Keep track of whether m is also part of the result set. 
1.1464 + int mfill = 0; 1.1465 + assert(marray[mfill] == m0, "sanity"); 1.1466 + if (Dependencies::is_concrete_method(m0)) 1.1467 + mfill++; // keep m0 as marray[0], the first result 1.1468 + for (int i = 0; i < num; i++) { 1.1469 + Method* fm = wf.found_method(i); 1.1470 + if (fm == m0) continue; // Already put this guy in the list. 1.1471 + if (mfill == mlen) { 1.1472 + return -1; // Oops. Too many methods after all! 1.1473 + } 1.1474 + marray[mfill++] = fm; 1.1475 + } 1.1476 +#ifndef PRODUCT 1.1477 + // Make sure the dependency mechanism will pass this discovery: 1.1478 + if (VerifyDependencies) { 1.1479 + // Turn off dependency tracing while actually testing deps. 1.1480 + FlagSetting fs(TraceDependencies, false); 1.1481 + switch (mfill) { 1.1482 + case 1: 1.1483 + guarantee(NULL == (void *)check_unique_concrete_method(ctxk, marray[0]), 1.1484 + "verify dep."); 1.1485 + break; 1.1486 + case 2: 1.1487 + guarantee(NULL == (void *) 1.1488 + check_exclusive_concrete_methods(ctxk, marray[0], marray[1]), 1.1489 + "verify dep."); 1.1490 + break; 1.1491 + default: 1.1492 + ShouldNotReachHere(); // mlen > 2 yet supported 1.1493 + } 1.1494 + } 1.1495 +#endif //PRODUCT 1.1496 + return mfill; 1.1497 +} 1.1498 + 1.1499 + 1.1500 +Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) { 1.1501 + Klass* search_at = ctxk; 1.1502 + if (changes != NULL) 1.1503 + search_at = changes->new_type(); // just look at the new bit 1.1504 + return find_finalizable_subclass(search_at); 1.1505 +} 1.1506 + 1.1507 + 1.1508 +Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) { 1.1509 + assert(call_site ->is_a(SystemDictionary::CallSite_klass()), "sanity"); 1.1510 + assert(method_handle->is_a(SystemDictionary::MethodHandle_klass()), "sanity"); 1.1511 + if (changes == NULL) { 1.1512 + // Validate all CallSites 1.1513 + if (java_lang_invoke_CallSite::target(call_site) != method_handle) 1.1514 + 
return call_site->klass(); // assertion failed 1.1515 + } else { 1.1516 + // Validate the given CallSite 1.1517 + if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) { 1.1518 + assert(method_handle != changes->method_handle(), "must be"); 1.1519 + return call_site->klass(); // assertion failed 1.1520 + } 1.1521 + } 1.1522 + return NULL; // assertion still valid 1.1523 +} 1.1524 + 1.1525 + 1.1526 +void Dependencies::DepStream::trace_and_log_witness(Klass* witness) { 1.1527 + if (witness != NULL) { 1.1528 + if (TraceDependencies) { 1.1529 + print_dependency(witness, /*verbose=*/ true); 1.1530 + } 1.1531 + // The following is a no-op unless logging is enabled: 1.1532 + log_dependency(witness); 1.1533 + } 1.1534 +} 1.1535 + 1.1536 + 1.1537 +Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) { 1.1538 + assert_locked_or_safepoint(Compile_lock); 1.1539 + Dependencies::check_valid_dependency_type(type()); 1.1540 + 1.1541 + Klass* witness = NULL; 1.1542 + switch (type()) { 1.1543 + case evol_method: 1.1544 + witness = check_evol_method(method_argument(0)); 1.1545 + break; 1.1546 + case leaf_type: 1.1547 + witness = check_leaf_type(context_type()); 1.1548 + break; 1.1549 + case abstract_with_unique_concrete_subtype: 1.1550 + witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes); 1.1551 + break; 1.1552 + case abstract_with_no_concrete_subtype: 1.1553 + witness = check_abstract_with_no_concrete_subtype(context_type(), changes); 1.1554 + break; 1.1555 + case concrete_with_no_concrete_subtype: 1.1556 + witness = check_concrete_with_no_concrete_subtype(context_type(), changes); 1.1557 + break; 1.1558 + case unique_concrete_method: 1.1559 + witness = check_unique_concrete_method(context_type(), method_argument(1), changes); 1.1560 + break; 1.1561 + case abstract_with_exclusive_concrete_subtypes_2: 1.1562 + witness = 
check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes); 1.1563 + break; 1.1564 + case exclusive_concrete_methods_2: 1.1565 + witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes); 1.1566 + break; 1.1567 + case no_finalizable_subclasses: 1.1568 + witness = check_has_no_finalizable_subclasses(context_type(), changes); 1.1569 + break; 1.1570 + default: 1.1571 + witness = NULL; 1.1572 + break; 1.1573 + } 1.1574 + trace_and_log_witness(witness); 1.1575 + return witness; 1.1576 +} 1.1577 + 1.1578 + 1.1579 +Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) { 1.1580 + assert_locked_or_safepoint(Compile_lock); 1.1581 + Dependencies::check_valid_dependency_type(type()); 1.1582 + 1.1583 + Klass* witness = NULL; 1.1584 + switch (type()) { 1.1585 + case call_site_target_value: 1.1586 + witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes); 1.1587 + break; 1.1588 + default: 1.1589 + witness = NULL; 1.1590 + break; 1.1591 + } 1.1592 + trace_and_log_witness(witness); 1.1593 + return witness; 1.1594 +} 1.1595 + 1.1596 + 1.1597 +Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) { 1.1598 + // Handle klass dependency 1.1599 + if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type())) 1.1600 + return check_klass_dependency(changes.as_klass_change()); 1.1601 + 1.1602 + // Handle CallSite dependency 1.1603 + if (changes.is_call_site_change()) 1.1604 + return check_call_site_dependency(changes.as_call_site_change()); 1.1605 + 1.1606 + // irrelevant dependency; skip it 1.1607 + return NULL; 1.1608 +} 1.1609 + 1.1610 + 1.1611 +void DepChange::print() { 1.1612 + int nsup = 0, nint = 0; 1.1613 + for (ContextStream str(*this); str.next(); ) { 1.1614 + Klass* k = str.klass(); 1.1615 + switch (str.change_type()) { 1.1616 + case Change_new_type: 1.1617 + 
tty->print_cr(" dependee = %s", InstanceKlass::cast(k)->external_name()); 1.1618 + break; 1.1619 + case Change_new_sub: 1.1620 + if (!WizardMode) { 1.1621 + ++nsup; 1.1622 + } else { 1.1623 + tty->print_cr(" context super = %s", InstanceKlass::cast(k)->external_name()); 1.1624 + } 1.1625 + break; 1.1626 + case Change_new_impl: 1.1627 + if (!WizardMode) { 1.1628 + ++nint; 1.1629 + } else { 1.1630 + tty->print_cr(" context interface = %s", InstanceKlass::cast(k)->external_name()); 1.1631 + } 1.1632 + break; 1.1633 + } 1.1634 + } 1.1635 + if (nsup + nint != 0) { 1.1636 + tty->print_cr(" context supers = %d, interfaces = %d", nsup, nint); 1.1637 + } 1.1638 +} 1.1639 + 1.1640 +void DepChange::ContextStream::start() { 1.1641 + Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL; 1.1642 + _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass); 1.1643 + _klass = new_type; 1.1644 + _ti_base = NULL; 1.1645 + _ti_index = 0; 1.1646 + _ti_limit = 0; 1.1647 +} 1.1648 + 1.1649 +bool DepChange::ContextStream::next() { 1.1650 + switch (_change_type) { 1.1651 + case Start_Klass: // initial state; _klass is the new type 1.1652 + _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces(); 1.1653 + _ti_index = 0; 1.1654 + _change_type = Change_new_type; 1.1655 + return true; 1.1656 + case Change_new_type: 1.1657 + // fall through: 1.1658 + _change_type = Change_new_sub; 1.1659 + case Change_new_sub: 1.1660 + // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277 1.1661 + { 1.1662 + _klass = InstanceKlass::cast(_klass)->super(); 1.1663 + if (_klass != NULL) { 1.1664 + return true; 1.1665 + } 1.1666 + } 1.1667 + // else set up _ti_limit and fall through: 1.1668 + _ti_limit = (_ti_base == NULL) ? 
0 : _ti_base->length(); 1.1669 + _change_type = Change_new_impl; 1.1670 + case Change_new_impl: 1.1671 + if (_ti_index < _ti_limit) { 1.1672 + _klass = _ti_base->at(_ti_index++); 1.1673 + return true; 1.1674 + } 1.1675 + // fall through: 1.1676 + _change_type = NO_CHANGE; // iterator is exhausted 1.1677 + case NO_CHANGE: 1.1678 + break; 1.1679 + default: 1.1680 + ShouldNotReachHere(); 1.1681 + } 1.1682 + return false; 1.1683 +} 1.1684 + 1.1685 +void KlassDepChange::initialize() { 1.1686 + // entire transaction must be under this lock: 1.1687 + assert_lock_strong(Compile_lock); 1.1688 + 1.1689 + // Mark all dependee and all its superclasses 1.1690 + // Mark transitive interfaces 1.1691 + for (ContextStream str(*this); str.next(); ) { 1.1692 + Klass* d = str.klass(); 1.1693 + assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking"); 1.1694 + InstanceKlass::cast(d)->set_is_marked_dependent(true); 1.1695 + } 1.1696 +} 1.1697 + 1.1698 +KlassDepChange::~KlassDepChange() { 1.1699 + // Unmark all dependee and all its superclasses 1.1700 + // Unmark transitive interfaces 1.1701 + for (ContextStream str(*this); str.next(); ) { 1.1702 + Klass* d = str.klass(); 1.1703 + InstanceKlass::cast(d)->set_is_marked_dependent(false); 1.1704 + } 1.1705 +} 1.1706 + 1.1707 +bool KlassDepChange::involves_context(Klass* k) { 1.1708 + if (k == NULL || !k->oop_is_instance()) { 1.1709 + return false; 1.1710 + } 1.1711 + InstanceKlass* ik = InstanceKlass::cast(k); 1.1712 + bool is_contained = ik->is_marked_dependent(); 1.1713 + assert(is_contained == new_type()->is_subtype_of(k), 1.1714 + "correct marking of potential context types"); 1.1715 + return is_contained; 1.1716 +} 1.1717 + 1.1718 +#ifndef PRODUCT 1.1719 +void Dependencies::print_statistics() { 1.1720 + if (deps_find_witness_print != 0) { 1.1721 + // Call one final time, to flush out the data. 1.1722 + deps_find_witness_print = -1; 1.1723 + count_find_witness_calls(); 1.1724 + } 1.1725 +} 1.1726 +#endif