src/share/vm/code/dependencies.cpp

changeset:   6096:e2509677809c
parent:      5848:ac9cb1d5a202
author:      vlivanov
date:        Fri, 08 Nov 2013 01:13:11 -0800
summary:     8023037: Race between ciEnv::register_method and nmethod::make_not_entrant_or_zombie
Reviewed-by: kvn, iveresov

     1 /*
     2  * Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved.
     3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
     4  *
     5  * This code is free software; you can redistribute it and/or modify it
     6  * under the terms of the GNU General Public License version 2 only, as
     7  * published by the Free Software Foundation.
     8  *
     9  * This code is distributed in the hope that it will be useful, but WITHOUT
    10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    12  * version 2 for more details (a copy is included in the LICENSE file that
    13  * accompanied this code).
    14  *
    15  * You should have received a copy of the GNU General Public License version
    16  * 2 along with this work; if not, write to the Free Software Foundation,
    17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
    18  *
    19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
    20  * or visit www.oracle.com if you need additional information or have any
    21  * questions.
    22  *
    23  */
    25 #include "precompiled.hpp"
    26 #include "ci/ciArrayKlass.hpp"
    27 #include "ci/ciEnv.hpp"
    28 #include "ci/ciKlass.hpp"
    29 #include "ci/ciMethod.hpp"
    30 #include "code/dependencies.hpp"
    31 #include "compiler/compileLog.hpp"
    32 #include "oops/oop.inline.hpp"
    33 #include "runtime/handles.hpp"
    34 #include "runtime/handles.inline.hpp"
    35 #include "utilities/copy.hpp"
    38 #ifdef ASSERT
    39 static bool must_be_in_vm() {
    40   Thread* thread = Thread::current();
    41   if (thread->is_Java_thread())
    42     return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
    43   else
    44     return true;  //something like this: thread->is_VM_thread();
    45 }
    46 #endif //ASSERT
    48 void Dependencies::initialize(ciEnv* env) {
    49   Arena* arena = env->arena();
    50   _oop_recorder = env->oop_recorder();
    51   _log = env->log();
    52   _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
    53   DEBUG_ONLY(_deps[end_marker] = NULL);
    54   for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
    55     _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
    56   }
    57   _content_bytes = NULL;
    58   _size_in_bytes = (size_t)-1;
    60   assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
    61 }
    63 void Dependencies::assert_evol_method(ciMethod* m) {
    64   assert_common_1(evol_method, m);
    65 }
    67 void Dependencies::assert_leaf_type(ciKlass* ctxk) {
    68   if (ctxk->is_array_klass()) {
    69     // As a special case, support this assertion on an array type,
    70     // which reduces to an assertion on its element type.
    71     // Note that this cannot be done with assertions that
    72     // relate to concreteness or abstractness.
    73     ciType* elemt = ctxk->as_array_klass()->base_element_type();
    74     if (!elemt->is_instance_klass())  return;   // Ex:  int[][]
    75     ctxk = elemt->as_instance_klass();
    76     //if (ctxk->is_final())  return;            // Ex:  String[][]
    77   }
    78   check_ctxk(ctxk);
    79   assert_common_1(leaf_type, ctxk);
    80 }
    82 void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
    83   check_ctxk_abstract(ctxk);
    84   assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
    85 }
    87 void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
    88   check_ctxk_abstract(ctxk);
    89   assert_common_1(abstract_with_no_concrete_subtype, ctxk);
    90 }
    92 void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
    93   check_ctxk_concrete(ctxk);
    94   assert_common_1(concrete_with_no_concrete_subtype, ctxk);
    95 }
    97 void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
    98   check_ctxk(ctxk);
    99   assert_common_2(unique_concrete_method, ctxk, uniqm);
   100 }
   102 void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
   103   check_ctxk(ctxk);
   104   assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
   105 }
   107 void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
   108   check_ctxk(ctxk);
   109   assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
   110 }
   112 void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
   113   check_ctxk(ctxk);
   114   assert_common_1(no_finalizable_subclasses, ctxk);
   115 }
   117 void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
   118   check_ctxk(call_site->klass());
   119   assert_common_2(call_site_target_value, call_site, method_handle);
   120 }
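        // Illustrative use of the assert_* API above (a sketch, not code from this
        // file): a compiler that devirtualizes a call after CHA reports a single
        // concrete target typically records something like
        //   deps->assert_unique_concrete_method(context_klass, target_method);
        // so that the resulting nmethod is invalidated if a conflicting
        // implementation is loaded later.  Names here are illustrative only.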
    122 // Helper function.  If we are adding a new dep. under ctxk2,
    123 // try to find an old dep. under a broader (super-) context type ctxk1.
    124 // If one exists, keep whichever context is broader and drop the other.
   125 bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
   126                                     int ctxk_i, ciKlass* ctxk2) {
   127   ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
   128   if (ctxk2->is_subtype_of(ctxk1)) {
   129     return true;  // success, and no need to change
   130   } else if (ctxk1->is_subtype_of(ctxk2)) {
   131     // new context class fully subsumes previous one
   132     deps->at_put(ctxk_i, ctxk2);
   133     return true;
   134   } else {
   135     return false;
   136   }
   137 }
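        // Example of the merge (a sketch): if a unique_concrete_method dep for method
        // M was first recorded under context class B, and the same M is later asserted
        // under B's superclass A, the stored context is widened from B to A (one record
        // instead of two).  If A had been recorded first, the later B assertion is
        // simply dropped as already covered.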
   139 void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
   140   assert(dep_args(dept) == 1, "sanity");
   141   log_dependency(dept, x);
   142   GrowableArray<ciBaseObject*>* deps = _deps[dept];
   144   // see if the same (or a similar) dep is already recorded
   145   if (note_dep_seen(dept, x)) {
   146     assert(deps->find(x) >= 0, "sanity");
   147   } else {
   148     deps->append(x);
   149   }
   150 }
   152 void Dependencies::assert_common_2(DepType dept,
   153                                    ciBaseObject* x0, ciBaseObject* x1) {
   154   assert(dep_args(dept) == 2, "sanity");
   155   log_dependency(dept, x0, x1);
   156   GrowableArray<ciBaseObject*>* deps = _deps[dept];
   158   // see if the same (or a similar) dep is already recorded
   159   bool has_ctxk = has_explicit_context_arg(dept);
   160   if (has_ctxk) {
   161     assert(dep_context_arg(dept) == 0, "sanity");
   162     if (note_dep_seen(dept, x1)) {
   163       // look in this bucket for redundant assertions
   164       const int stride = 2;
   165       for (int i = deps->length(); (i -= stride) >= 0; ) {
   166         ciBaseObject* y1 = deps->at(i+1);
   167         if (x1 == y1) {  // same subject; check the context
   168           if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
   169             return;
   170           }
   171         }
   172       }
   173     }
   174   } else {
   175     assert(dep_implicit_context_arg(dept) == 0, "sanity");
   176     if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
   177       // look in this bucket for redundant assertions
   178       const int stride = 2;
   179       for (int i = deps->length(); (i -= stride) >= 0; ) {
   180         ciBaseObject* y0 = deps->at(i+0);
   181         ciBaseObject* y1 = deps->at(i+1);
   182         if (x0 == y0 && x1 == y1) {
   183           return;
   184         }
   185       }
   186     }
   187   }
   189   // append the assertion in the correct bucket:
   190   deps->append(x0);
   191   deps->append(x1);
   192 }
   194 void Dependencies::assert_common_3(DepType dept,
   195                                    ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
   196   assert(dep_context_arg(dept) == 0, "sanity");
   197   assert(dep_args(dept) == 3, "sanity");
   198   log_dependency(dept, ctxk, x, x2);
   199   GrowableArray<ciBaseObject*>* deps = _deps[dept];
   201   // try to normalize an unordered pair:
   202   bool swap = false;
   203   switch (dept) {
   204   case abstract_with_exclusive_concrete_subtypes_2:
   205     swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
   206     break;
   207   case exclusive_concrete_methods_2:
   208     swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
   209     break;
   210   }
   211   if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
   213   // see if the same (or a similar) dep is already recorded
   214   if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
   215     // look in this bucket for redundant assertions
   216     const int stride = 3;
   217     for (int i = deps->length(); (i -= stride) >= 0; ) {
   218       ciBaseObject* y  = deps->at(i+1);
   219       ciBaseObject* y2 = deps->at(i+2);
   220       if (x == y && x2 == y2) {  // same subjects; check the context
   221         if (maybe_merge_ctxk(deps, i+0, ctxk)) {
   222           return;
   223         }
   224       }
   225     }
   226   }
   227   // append the assertion in the correct bucket:
   228   deps->append(ctxk);
   229   deps->append(x);
   230   deps->append(x2);
   231 }
   233 /// Support for encoding dependencies into an nmethod:
   235 void Dependencies::copy_to(nmethod* nm) {
   236   address beg = nm->dependencies_begin();
   237   address end = nm->dependencies_end();
   238   guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
   239   Copy::disjoint_words((HeapWord*) content_bytes(),
   240                        (HeapWord*) beg,
   241                        size_in_bytes() / sizeof(HeapWord));
   242   assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
   243 }
   245 static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
   246   for (int i = 0; i < narg; i++) {
   247     int diff = p1[i]->ident() - p2[i]->ident();
   248     if (diff != 0)  return diff;
   249   }
   250   return 0;
   251 }
   252 static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
   253 { return sort_dep(p1, p2, 1); }
   254 static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
   255 { return sort_dep(p1, p2, 2); }
   256 static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
   257 { return sort_dep(p1, p2, 3); }
   259 void Dependencies::sort_all_deps() {
   260   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
   261     DepType dept = (DepType)deptv;
   262     GrowableArray<ciBaseObject*>* deps = _deps[dept];
   263     if (deps->length() <= 1)  continue;
   264     switch (dep_args(dept)) {
   265     case 1: deps->sort(sort_dep_arg_1, 1); break;
   266     case 2: deps->sort(sort_dep_arg_2, 2); break;
   267     case 3: deps->sort(sort_dep_arg_3, 3); break;
   268     default: ShouldNotReachHere();
   269     }
   270   }
   271 }
   273 size_t Dependencies::estimate_size_in_bytes() {
   274   size_t est_size = 100;
   275   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
   276     DepType dept = (DepType)deptv;
   277     GrowableArray<ciBaseObject*>* deps = _deps[dept];
   278     est_size += deps->length()*2;  // tags and argument(s)
   279   }
   280   return est_size;
   281 }
   283 ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
   284   switch (dept) {
   285   case abstract_with_exclusive_concrete_subtypes_2:
   286     return x->as_metadata()->as_klass();
   287   case unique_concrete_method:
   288   case exclusive_concrete_methods_2:
   289     return x->as_metadata()->as_method()->holder();
   290   }
   291   return NULL;  // let NULL be NULL
   292 }
   294 Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
   295   assert(must_be_in_vm(), "raw oops here");
   296   switch (dept) {
   297   case abstract_with_exclusive_concrete_subtypes_2:
   298     assert(x->is_klass(), "sanity");
   299     return (Klass*) x;
   300   case unique_concrete_method:
   301   case exclusive_concrete_methods_2:
   302     assert(x->is_method(), "sanity");
   303     return ((Method*)x)->method_holder();
   304   }
   305   return NULL;  // let NULL be NULL
   306 }
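        // Example of the default-context encoding: for a unique_concrete_method dep
        // (ctxk, m), the context class is omitted from the encoded stream whenever
        // ctxk == m's holder; the decoder reconstructs it via ctxk_encoded_as_null()
        // above, and the tag byte's default_context_type_bit records the elision.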
   308 void Dependencies::encode_content_bytes() {
   309   sort_all_deps();
   311   // cast is safe, no deps can overflow INT_MAX
   312   CompressedWriteStream bytes((int)estimate_size_in_bytes());
   314   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
   315     DepType dept = (DepType)deptv;
   316     GrowableArray<ciBaseObject*>* deps = _deps[dept];
   317     if (deps->length() == 0)  continue;
   318     int stride = dep_args(dept);
   319     int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
   320     assert(stride > 0, "sanity");
   321     for (int i = 0; i < deps->length(); i += stride) {
   322       jbyte code_byte = (jbyte)dept;
   323       int skipj = -1;
   324       if (ctxkj >= 0 && ctxkj+1 < stride) {
   325         ciKlass*  ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
   326         ciBaseObject* x     = deps->at(i+ctxkj+1);  // following argument
   327         if (ctxk == ctxk_encoded_as_null(dept, x)) {
   328           skipj = ctxkj;  // we win:  maybe one less oop to keep track of
   329           code_byte |= default_context_type_bit;
   330         }
   331       }
   332       bytes.write_byte(code_byte);
   333       for (int j = 0; j < stride; j++) {
   334         if (j == skipj)  continue;
   335         ciBaseObject* v = deps->at(i+j);
   336         int idx;
   337         if (v->is_object()) {
   338           idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
   339         } else {
   340           ciMetadata* meta = v->as_metadata();
   341           idx = _oop_recorder->find_index(meta->constant_encoding());
   342         }
   343         bytes.write_int(idx);
   344       }
   345     }
   346   }
   348   // write a sentinel byte to mark the end
   349   bytes.write_byte(end_marker);
   351   // round it out to a word boundary
   352   while (bytes.position() % sizeof(HeapWord) != 0) {
   353     bytes.write_byte(end_marker);
   354   }
   356   // check whether the dept byte encoding really works
   357   assert((jbyte)default_context_type_bit != 0, "byte overflow");
   359   _content_bytes = bytes.buffer();
   360   _size_in_bytes = bytes.position();
   361 }
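        // Shape of the stream produced above (informal): records are grouped by
        // DepType and sorted; each record is one tag byte (the DepType, optionally
        // OR'ed with default_context_type_bit) followed by the record's arguments as
        // compressed ints, each an OopRecorder index, with an elided default context
        // written as nothing at all.  An end_marker byte terminates the stream, and
        // more end_marker bytes pad it out to a HeapWord boundary.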
   364 const char* Dependencies::_dep_name[TYPE_LIMIT] = {
   365   "end_marker",
   366   "evol_method",
   367   "leaf_type",
   368   "abstract_with_unique_concrete_subtype",
   369   "abstract_with_no_concrete_subtype",
   370   "concrete_with_no_concrete_subtype",
   371   "unique_concrete_method",
   372   "abstract_with_exclusive_concrete_subtypes_2",
   373   "exclusive_concrete_methods_2",
   374   "no_finalizable_subclasses",
   375   "call_site_target_value"
   376 };
   378 int Dependencies::_dep_args[TYPE_LIMIT] = {
   379   -1,// end_marker
   380   1, // evol_method m
   381   1, // leaf_type ctxk
   382   2, // abstract_with_unique_concrete_subtype ctxk, k
   383   1, // abstract_with_no_concrete_subtype ctxk
   384   1, // concrete_with_no_concrete_subtype ctxk
   385   2, // unique_concrete_method ctxk, m
   386   3, // unique_concrete_subtypes_2 ctxk, k1, k2
   387   3, // unique_concrete_methods_2 ctxk, m1, m2
   388   1, // no_finalizable_subclasses ctxk
   389   2  // call_site_target_value call_site, method_handle
   390 };
   392 const char* Dependencies::dep_name(Dependencies::DepType dept) {
   393   if (!dept_in_mask(dept, all_types))  return "?bad-dep?";
   394   return _dep_name[dept];
   395 }
   397 int Dependencies::dep_args(Dependencies::DepType dept) {
   398   if (!dept_in_mask(dept, all_types))  return -1;
   399   return _dep_args[dept];
   400 }
   402 void Dependencies::check_valid_dependency_type(DepType dept) {
   403   guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, err_msg("invalid dependency type: %d", (int) dept));
   404 }
   406 // for the sake of the compiler log, print out current dependencies:
   407 void Dependencies::log_all_dependencies() {
   408   if (log() == NULL)  return;
   409   ciBaseObject* args[max_arg_count];
   410   for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
   411     DepType dept = (DepType)deptv;
   412     GrowableArray<ciBaseObject*>* deps = _deps[dept];
   413     if (deps->length() == 0)  continue;
   414     int stride = dep_args(dept);
   415     for (int i = 0; i < deps->length(); i += stride) {
   416       for (int j = 0; j < stride; j++) {
   417         // flush out the identities before printing
   418         args[j] = deps->at(i+j);
   419       }
   420       write_dependency_to(log(), dept, stride, args);
   421     }
   422   }
   423 }
   425 void Dependencies::write_dependency_to(CompileLog* log,
   426                                        DepType dept,
   427                                        int nargs, DepArgument args[],
   428                                        Klass* witness) {
   429   if (log == NULL) {
   430     return;
   431   }
   432   ciEnv* env = ciEnv::current();
   433   ciBaseObject* ciargs[max_arg_count];
   434   assert(nargs <= max_arg_count, "oob");
   435   for (int j = 0; j < nargs; j++) {
   436     if (args[j].is_oop()) {
   437       ciargs[j] = env->get_object(args[j].oop_value());
   438     } else {
   439       ciargs[j] = env->get_metadata(args[j].metadata_value());
   440     }
   441   }
   442   Dependencies::write_dependency_to(log, dept, nargs, ciargs, witness);
   443 }
   445 void Dependencies::write_dependency_to(CompileLog* log,
   446                                        DepType dept,
   447                                        int nargs, ciBaseObject* args[],
   448                                        Klass* witness) {
   449   if (log == NULL)  return;
   450   assert(nargs <= max_arg_count, "oob");
   451   int argids[max_arg_count];
   452   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
   453   int j;
   454   for (j = 0; j < nargs; j++) {
   455     if (args[j]->is_object()) {
   456       argids[j] = log->identify(args[j]->as_object());
   457     } else {
   458       argids[j] = log->identify(args[j]->as_metadata());
   459     }
   460   }
   461   if (witness != NULL) {
   462     log->begin_elem("dependency_failed");
   463   } else {
   464     log->begin_elem("dependency");
   465   }
   466   log->print(" type='%s'", dep_name(dept));
   467   if (ctxkj >= 0) {
   468     log->print(" ctxk='%d'", argids[ctxkj]);
   469   }
   470   // write remaining arguments, if any.
   471   for (j = 0; j < nargs; j++) {
   472     if (j == ctxkj)  continue;  // already logged
   473     if (j == 1) {
   474       log->print(  " x='%d'",    argids[j]);
   475     } else {
   476       log->print(" x%d='%d'", j, argids[j]);
   477     }
   478   }
   479   if (witness != NULL) {
   480     log->object("witness", witness);
   481     log->stamp();
   482   }
   483   log->end_elem();
   484 }
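        // The element written above comes out roughly as
        //   <dependency type='unique_concrete_method' ctxk='23' x='24'/>
        // where the numbers are the compile log's object identities; a failed check
        // uses the dependency_failed element and also records the witness and a
        // timestamp.  (Attribute values here are made up for illustration.)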
   486 void Dependencies::write_dependency_to(xmlStream* xtty,
   487                                        DepType dept,
   488                                        int nargs, DepArgument args[],
   489                                        Klass* witness) {
   490   if (xtty == NULL)  return;
   491   ttyLocker ttyl;
   492   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
   493   if (witness != NULL) {
   494     xtty->begin_elem("dependency_failed");
   495   } else {
   496     xtty->begin_elem("dependency");
   497   }
   498   xtty->print(" type='%s'", dep_name(dept));
   499   if (ctxkj >= 0) {
   500     xtty->object("ctxk", args[ctxkj].metadata_value());
   501   }
   502   // write remaining arguments, if any.
   503   for (int j = 0; j < nargs; j++) {
   504     if (j == ctxkj)  continue;  // already logged
   505     if (j == 1) {
   506       if (args[j].is_oop()) {
   507         xtty->object("x", args[j].oop_value());
   508       } else {
   509         xtty->object("x", args[j].metadata_value());
   510       }
   511     } else {
   512       char xn[10]; sprintf(xn, "x%d", j);
   513       if (args[j].is_oop()) {
   514         xtty->object(xn, args[j].oop_value());
   515       } else {
   516         xtty->object(xn, args[j].metadata_value());
   517       }
   518     }
   519   }
   520   if (witness != NULL) {
   521     xtty->object("witness", witness);
   522     xtty->stamp();
   523   }
   524   xtty->end_elem();
   525 }
   527 void Dependencies::print_dependency(DepType dept, int nargs, DepArgument args[],
   528                                     Klass* witness) {
   529   ResourceMark rm;
   530   ttyLocker ttyl;   // keep the following output all in one block
   531   tty->print_cr("%s of type %s",
   532                 (witness == NULL)? "Dependency": "Failed dependency",
   533                 dep_name(dept));
   534   // print arguments
   535   int ctxkj = dep_context_arg(dept);  // -1 if no context arg
   536   for (int j = 0; j < nargs; j++) {
   537     DepArgument arg = args[j];
   538     bool put_star = false;
   539     if (arg.is_null())  continue;
   540     const char* what;
   541     if (j == ctxkj) {
   542       assert(arg.is_metadata(), "must be");
   543       what = "context";
   544       put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
   545     } else if (arg.is_method()) {
   546       what = "method ";
   547       put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value());
   548     } else if (arg.is_klass()) {
   549       what = "class  ";
   550     } else {
   551       what = "object ";
   552     }
   553     tty->print("  %s = %s", what, (put_star? "*": ""));
   554     if (arg.is_klass())
   555       tty->print("%s", ((Klass*)arg.metadata_value())->external_name());
   556     else if (arg.is_method())
   557       ((Method*)arg.metadata_value())->print_value();
   558     else
   559       ShouldNotReachHere(); // Provide impl for this type.
   560     tty->cr();
   561   }
   562   if (witness != NULL) {
   563     bool put_star = !Dependencies::is_concrete_klass(witness);
   564     tty->print_cr("  witness = %s%s",
   565                   (put_star? "*": ""),
   566                   witness->external_name());
   567   }
   568 }
   570 void Dependencies::DepStream::log_dependency(Klass* witness) {
   571   if (_deps == NULL && xtty == NULL)  return;  // fast cutout for runtime
   572   ResourceMark rm;
   573   int nargs = argument_count();
   574   DepArgument args[max_arg_count];
   575   for (int j = 0; j < nargs; j++) {
   576     if (type() == call_site_target_value) {
   577       args[j] = argument_oop(j);
   578     } else {
   579       args[j] = argument(j);
   580     }
   581   }
   582   if (_deps != NULL && _deps->log() != NULL) {
   583     Dependencies::write_dependency_to(_deps->log(),
   584                                       type(), nargs, args, witness);
   585   } else {
   586     Dependencies::write_dependency_to(xtty,
   587                                       type(), nargs, args, witness);
   588   }
   589 }
   591 void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose) {
   592   int nargs = argument_count();
   593   DepArgument args[max_arg_count];
   594   for (int j = 0; j < nargs; j++) {
   595     args[j] = argument(j);
   596   }
   597   Dependencies::print_dependency(type(), nargs, args, witness);
   598   if (verbose) {
   599     if (_code != NULL) {
   600       tty->print("  code: ");
   601       _code->print_value_on(tty);
   602       tty->cr();
   603     }
   604   }
   605 }
   608 /// Dependency stream support (decodes dependencies from an nmethod):
   610 #ifdef ASSERT
   611 void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
   612   assert(must_be_in_vm(), "raw oops here");
   613   _byte_limit = byte_limit;
   614   _type       = (DepType)(end_marker-1);  // defeat "already at end" assert
   615   assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
   616 }
   617 #endif //ASSERT
   619 bool Dependencies::DepStream::next() {
   620   assert(_type != end_marker, "already at end");
   621   if (_bytes.position() == 0 && _code != NULL
   622       && _code->dependencies_size() == 0) {
   623     // Method has no dependencies at all.
   624     return false;
   625   }
   626   int code_byte = (_bytes.read_byte() & 0xFF);
   627   if (code_byte == end_marker) {
   628     DEBUG_ONLY(_type = end_marker);
   629     return false;
   630   } else {
   631     int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
   632     code_byte -= ctxk_bit;
   633     DepType dept = (DepType)code_byte;
   634     _type = dept;
   635     Dependencies::check_valid_dependency_type(dept);
   636     int stride = _dep_args[dept];
   637     assert(stride == dep_args(dept), "sanity");
   638     int skipj = -1;
   639     if (ctxk_bit != 0) {
   640       skipj = 0;  // currently the only context argument is at zero
   641       assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
   642     }
   643     for (int j = 0; j < stride; j++) {
   644       _xi[j] = (j == skipj)? 0: _bytes.read_int();
   645     }
   646     DEBUG_ONLY(_xi[stride] = -1);   // help detect overruns
   647     return true;
   648   }
   649 }
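        // Typical decode loop (a sketch; the checking entry points are declared in
        // dependencies.hpp):
        //   for (Dependencies::DepStream deps(nm); deps.next(); ) {
        //     Klass* witness = deps.check_dependency();
        //     if (witness != NULL)  break;   // an assumption of nm no longer holds
        //   }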
   651 inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
   652   Metadata* o = NULL;
   653   if (_code != NULL) {
   654     o = _code->metadata_at(i);
   655   } else {
   656     o = _deps->oop_recorder()->metadata_at(i);
   657   }
   658   assert(o == NULL || o->is_metaspace_object(),
   659          err_msg("Should be metadata " PTR_FORMAT, o));
   660   return o;
   661 }
   663 inline oop Dependencies::DepStream::recorded_oop_at(int i) {
   664   return (_code != NULL)
   665          ? _code->oop_at(i)
   666     : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
   667 }
   669 Metadata* Dependencies::DepStream::argument(int i) {
   670   Metadata* result = recorded_metadata_at(argument_index(i));
   672   if (result == NULL) { // Explicit context argument can be compressed
   673     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
   674     if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
   675       result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
   676     }
   677   }
   679   assert(result == NULL || result->is_klass() || result->is_method(), "must be");
   680   return result;
   681 }
   683 oop Dependencies::DepStream::argument_oop(int i) {
   684   oop result = recorded_oop_at(argument_index(i));
   685   assert(result == NULL || result->is_oop(), "must be");
   686   return result;
   687 }
   689 Klass* Dependencies::DepStream::context_type() {
   690   assert(must_be_in_vm(), "raw oops here");
   692   // Most dependencies have an explicit context type argument.
   693   {
   694     int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
   695     if (ctxkj >= 0) {
   696       Metadata* k = argument(ctxkj);
   697       assert(k != NULL && k->is_klass(), "type check");
   698       return (Klass*)k;
   699     }
   700   }
   702   // Some dependencies are using the klass of the first object
   703   // argument as implicit context type (e.g. call_site_target_value).
   704   {
   705     int ctxkj = dep_implicit_context_arg(type());
   706     if (ctxkj >= 0) {
   707       Klass* k = argument_oop(ctxkj)->klass();
   708       assert(k != NULL && k->is_klass(), "type check");
   709       return (Klass*) k;
   710     }
   711   }
   713   // And some dependencies don't have a context type at all,
   714   // e.g. evol_method.
   715   return NULL;
   716 }
   718 /// Checking dependencies:
   720 // This hierarchy walker inspects subtypes of a given type,
   721 // trying to find a "bad" class which breaks a dependency.
   722 // Such a class is called a "witness" to the broken dependency.
   723 // While searching around, we ignore "participants", which
   724 // are already known to the dependency.
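        // Example: for a unique_concrete_method dependency on (ctxk, m), the single
        // participant is m's holder; a newly loaded class under ctxk that supplies a
        // different concrete implementation of m would be the witness breaking it.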
   725 class ClassHierarchyWalker {
   726  public:
   727   enum { PARTICIPANT_LIMIT = 3 };
   729  private:
   730   // optional method descriptor to check for:
   731   Symbol* _name;
   732   Symbol* _signature;
   734   // special classes which are not allowed to be witnesses:
   735   Klass*    _participants[PARTICIPANT_LIMIT+1];
   736   int       _num_participants;
   738   // cache of method lookups
   739   Method* _found_methods[PARTICIPANT_LIMIT+1];
   741   // if non-zero, tells how many witnesses to convert to participants
   742   int       _record_witnesses;
   744   void initialize(Klass* participant) {
   745     _record_witnesses = 0;
   746     _participants[0]  = participant;
   747     _found_methods[0] = NULL;
   748     _num_participants = 0;
   749     if (participant != NULL) {
   750       // Terminating NULL.
   751       _participants[1] = NULL;
   752       _found_methods[1] = NULL;
   753       _num_participants = 1;
   754     }
   755   }
   757   void initialize_from_method(Method* m) {
   758     assert(m != NULL && m->is_method(), "sanity");
   759     _name      = m->name();
   760     _signature = m->signature();
   761   }
   763  public:
   764   // The walker is initialized to recognize certain methods and/or types
   765   // as friendly participants.
   766   ClassHierarchyWalker(Klass* participant, Method* m) {
   767     initialize_from_method(m);
   768     initialize(participant);
   769   }
   770   ClassHierarchyWalker(Method* m) {
   771     initialize_from_method(m);
   772     initialize(NULL);
   773   }
   774   ClassHierarchyWalker(Klass* participant = NULL) {
   775     _name      = NULL;
   776     _signature = NULL;
   777     initialize(participant);
   778   }
   780   // This is common code for two searches:  One for concrete subtypes,
   781   // the other for concrete method implementations and overrides.
   782   bool doing_subtype_search() {
   783     return _name == NULL;
   784   }
   786   int num_participants() { return _num_participants; }
   787   Klass* participant(int n) {
   788     assert((uint)n <= (uint)_num_participants, "oob");
   789     return _participants[n];
   790   }
   792   // Note:  If n==num_participants, returns NULL.
   793   Method* found_method(int n) {
   794     assert((uint)n <= (uint)_num_participants, "oob");
   795     Method* fm = _found_methods[n];
   796     assert(n == _num_participants || fm != NULL, "proper usage");
   797     assert(fm == NULL || fm->method_holder() == _participants[n], "sanity");
   798     return fm;
   799   }
   801 #ifdef ASSERT
   802   // Assert that m is inherited into ctxk, without intervening overrides.
   803   // (May return true even if this is not true, in corner cases where we punt.)
   804   bool check_method_context(Klass* ctxk, Method* m) {
   805     if (m->method_holder() == ctxk)
   806       return true;  // Quick win.
   807     if (m->is_private())
   808       return false; // Quick lose.  Should not happen.
   809     if (!(m->is_public() || m->is_protected()))
   810       // The override story is complex when packages get involved.
   811       return true;  // Must punt the assertion to true.
   812     Klass* k = ctxk;
   813     Method* lm = k->lookup_method(m->name(), m->signature());
   814     if (lm == NULL && k->oop_is_instance()) {
   815       // It might be an interface method
   816         lm = ((InstanceKlass*)k)->lookup_method_in_ordered_interfaces(m->name(),
   817                                                                 m->signature());
   818     }
   819     if (lm == m)
   820       // Method m is inherited into ctxk.
   821       return true;
   822     if (lm != NULL) {
   823       if (!(lm->is_public() || lm->is_protected())) {
   824         // Method is [package-]private, so the override story is complex.
   825         return true;  // Must punt the assertion to true.
   826       }
   827       if (lm->is_static()) {
   828         // Static methods don't override non-static so punt
   829         return true;
   830       }
   831       if (   !Dependencies::is_concrete_method(lm)
   832           && !Dependencies::is_concrete_method(m)
   833           && lm->method_holder()->is_subtype_of(m->method_holder()))
   834         // Method m is overridden by lm, but both are non-concrete.
   835         return true;
   836     }
   837     ResourceMark rm;
   838     tty->print_cr("Dependency method not found in the associated context:");
   839     tty->print_cr("  context = %s", ctxk->external_name());
   840     tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
   841     if (lm != NULL) {
   842       tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
   843     }
   844     return false;
   845   }
   846 #endif
   848   void add_participant(Klass* participant) {
   849     assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
   850     int np = _num_participants++;
   851     _participants[np] = participant;
   852     _participants[np+1] = NULL;
   853     _found_methods[np+1] = NULL;
   854   }
   856   void record_witnesses(int add) {
   857     if (add > PARTICIPANT_LIMIT)  add = PARTICIPANT_LIMIT;
   858     assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
   859     _record_witnesses = add;
   860   }
   862   bool is_witness(Klass* k) {
   863     if (doing_subtype_search()) {
   864       return Dependencies::is_concrete_klass(k);
   865     } else {
   866       Method* m = InstanceKlass::cast(k)->find_method(_name, _signature);
   867       if (m == NULL || !Dependencies::is_concrete_method(m))  return false;
   868       _found_methods[_num_participants] = m;
   869       // Note:  If add_participant(k) is called,
   870       // the method m will already be memoized for it.
   871       return true;
   872     }
   873   }
   875   bool is_participant(Klass* k) {
   876     if (k == _participants[0]) {
   877       return true;
   878     } else if (_num_participants <= 1) {
   879       return false;
   880     } else {
   881       return in_list(k, &_participants[1]);
   882     }
   883   }
   884   bool ignore_witness(Klass* witness) {
   885     if (_record_witnesses == 0) {
   886       return false;
   887     } else {
   888       --_record_witnesses;
   889       add_participant(witness);
   890       return true;
   891     }
   892   }
   893   static bool in_list(Klass* x, Klass** list) {
   894     for (int i = 0; ; i++) {
   895       Klass* y = list[i];
   896       if (y == NULL)  break;
   897       if (y == x)  return true;
   898     }
   899     return false;  // not in list
   900   }
   902  private:
   903   // the actual search method:
   904   Klass* find_witness_anywhere(Klass* context_type,
   905                                  bool participants_hide_witnesses,
   906                                  bool top_level_call = true);
   907   // the spot-checking version:
   908   Klass* find_witness_in(KlassDepChange& changes,
   909                          Klass* context_type,
   910                            bool participants_hide_witnesses);
   911  public:
   912   Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
   913     assert(doing_subtype_search(), "must set up a subtype search");
   914     // When looking for unexpected concrete types,
   915     // do not look beneath expected ones.
   916     const bool participants_hide_witnesses = true;
   917     // CX > CC > C' is OK, even if C' is new.
   918     // CX > { CC,  C' } is not OK if C' is new, and C' is the witness.
   919     if (changes != NULL) {
   920       return find_witness_in(*changes, context_type, participants_hide_witnesses);
   921     } else {
   922       return find_witness_anywhere(context_type, participants_hide_witnesses);
   923     }
   924   }
   925   Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
   926     assert(!doing_subtype_search(), "must set up a method definer search");
   927     // When looking for unexpected concrete methods,
   928     // look beneath expected ones, to see if there are overrides.
   929     const bool participants_hide_witnesses = true;
   930     // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
   931     if (changes != NULL) {
   932       return find_witness_in(*changes, context_type, !participants_hide_witnesses);
   933     } else {
   934       return find_witness_anywhere(context_type, !participants_hide_witnesses);
   935     }
   936   }
   937 };
   939 #ifndef PRODUCT
   940 static int deps_find_witness_calls = 0;
   941 static int deps_find_witness_steps = 0;
   942 static int deps_find_witness_recursions = 0;
   943 static int deps_find_witness_singles = 0;
   944 static int deps_find_witness_print = 0; // set to -1 to force a final print
   945 static bool count_find_witness_calls() {
   946   if (TraceDependencies || LogCompilation) {
   947     int pcount = deps_find_witness_print + 1;
   948     bool final_stats      = (pcount == 0);
   949     bool initial_call     = (pcount == 1);
   950     bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
   951     if (pcount < 0)  pcount = 1; // crude overflow protection
   952     deps_find_witness_print = pcount;
   953     if (VerifyDependencies && initial_call) {
   954       tty->print_cr("Warning:  TraceDependencies results may be inflated by VerifyDependencies");
   955     }
   956     if (occasional_print || final_stats) {
   957       // Every now and then dump a little info about dependency searching.
   958       if (xtty != NULL) {
   959        ttyLocker ttyl;
   960        xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
   961                    deps_find_witness_calls,
   962                    deps_find_witness_steps,
   963                    deps_find_witness_recursions,
   964                    deps_find_witness_singles);
   965       }
   966       if (final_stats || (TraceDependencies && WizardMode)) {
   967         ttyLocker ttyl;
   968         tty->print_cr("Dependency check (find_witness) "
   969                       "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
   970                       deps_find_witness_calls,
   971                       deps_find_witness_steps,
   972                       (double)deps_find_witness_steps / deps_find_witness_calls,
   973                       deps_find_witness_recursions,
   974                       deps_find_witness_singles);
   975       }
   976     }
   977     return true;
   978   }
   979   return false;
   980 }
   981 #else
   982 #define count_find_witness_calls() (0)
   983 #endif //PRODUCT
   986 Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
   987                                                Klass* context_type,
   988                                                bool participants_hide_witnesses) {
   989   assert(changes.involves_context(context_type), "irrelevant dependency");
   990   Klass* new_type = changes.new_type();
   992   (void)count_find_witness_calls();
   993   NOT_PRODUCT(deps_find_witness_singles++);
   995   // Current thread must be in VM (not native mode, as in CI):
   996   assert(must_be_in_vm(), "raw oops here");
   997   // Must not move the class hierarchy during this check:
   998   assert_locked_or_safepoint(Compile_lock);
  1000   int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
  1001   if (nof_impls > 1) {
  1002     // Avoid this case: *I.m > { A.m, C }; B.m > C
  1003     // %%% Until this is fixed more systematically, bail out.
  1004     // See corresponding comment in find_witness_anywhere.
   1005     return context_type;
   1006   }
  1008   assert(!is_participant(new_type), "only old classes are participants");
  1009   if (participants_hide_witnesses) {
  1010     // If the new type is a subtype of a participant, we are done.
  1011     for (int i = 0; i < num_participants(); i++) {
  1012       Klass* part = participant(i);
  1013       if (part == NULL)  continue;
  1014       assert(changes.involves_context(part) == new_type->is_subtype_of(part),
  1015              "correct marking of participants, b/c new_type is unique");
  1016       if (changes.involves_context(part)) {
  1017         // new guy is protected from this check by previous participant
   1018         return NULL;
   1019       }
   1020     }
   1021   }
  1023   if (is_witness(new_type) &&
  1024       !ignore_witness(new_type)) {
   1025     return new_type;
   1026   }
   1028   return NULL;
   1029 }
  1032 // Walk hierarchy under a context type, looking for unexpected types.
  1033 // Do not report participant types, and recursively walk beneath
  1034 // them only if participants_hide_witnesses is false.
  1035 // If top_level_call is false, skip testing the context type,
  1036 // because the caller has already considered it.
  1037 Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
  1038                                                      bool participants_hide_witnesses,
  1039                                                      bool top_level_call) {
  1040   // Current thread must be in VM (not native mode, as in CI):
  1041   assert(must_be_in_vm(), "raw oops here");
  1042   // Must not move the class hierarchy during this check:
  1043   assert_locked_or_safepoint(Compile_lock);
  1045   bool do_counts = count_find_witness_calls();
  1047   // Check the root of the sub-hierarchy first.
  1048   if (top_level_call) {
  1049     if (do_counts) {
  1050       NOT_PRODUCT(deps_find_witness_calls++);
   1051       NOT_PRODUCT(deps_find_witness_steps++);
   1052     }
  1053     if (is_participant(context_type)) {
  1054       if (participants_hide_witnesses)  return NULL;
  1055       // else fall through to search loop...
  1056     } else if (is_witness(context_type) && !ignore_witness(context_type)) {
  1057       // The context is an abstract class or interface, to start with.
   1058       return context_type;
   1059     }
   1060   }
  1062   // Now we must check each implementor and each subclass.
  1063   // Use a short worklist to avoid blowing the stack.
  1064   // Each worklist entry is a *chain* of subklass siblings to process.
  1065   const int CHAINMAX = 100;  // >= 1 + InstanceKlass::implementors_limit
  1066   Klass* chains[CHAINMAX];
  1067   int    chaini = 0;  // index into worklist
  1068   Klass* chain;       // scratch variable
  1069 #define ADD_SUBCLASS_CHAIN(k)                     {  \
  1070     assert(chaini < CHAINMAX, "oob");                \
  1071     chain = InstanceKlass::cast(k)->subklass();      \
  1072     if (chain != NULL)  chains[chaini++] = chain;    }
  1074   // Look for non-abstract subclasses.
  1075   // (Note:  Interfaces do not have subclasses.)
  1076   ADD_SUBCLASS_CHAIN(context_type);
  1078   // If it is an interface, search its direct implementors.
  1079   // (Their subclasses are additional indirect implementors.
  1080   // See InstanceKlass::add_implementor.)
  1081   // (Note:  nof_implementors is always zero for non-interfaces.)
  1082   int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
  1083   if (nof_impls > 1) {
  1084     // Avoid this case: *I.m > { A.m, C }; B.m > C
  1085     // Here, I.m has 2 concrete implementations, but m appears unique
  1086     // as A.m, because the search misses B.m when checking C.
  1087     // The inherited method B.m was getting missed by the walker
  1088     // when interface 'I' was the starting point.
  1089     // %%% Until this is fixed more systematically, bail out.
  1090     // (Old CHA had the same limitation.)
   1091     return context_type;
   1092   }
  1093   if (nof_impls > 0) {
  1094     Klass* impl = InstanceKlass::cast(context_type)->implementor();
  1095     assert(impl != NULL, "just checking");
  1096     // If impl is the same as the context_type, then more than one
   1097     // implementor has been seen. No exact info in this case.
  1098     if (impl == context_type) {
   1099       return context_type;  // report an inexact witness to this sad affair
   1100     }
  1101     if (do_counts)
  1102       { NOT_PRODUCT(deps_find_witness_steps++); }
  1103     if (is_participant(impl)) {
  1104       if (!participants_hide_witnesses) {
   1105         ADD_SUBCLASS_CHAIN(impl);
   1106       }
  1107     } else if (is_witness(impl) && !ignore_witness(impl)) {
  1108       return impl;
  1109     } else {
   1110       ADD_SUBCLASS_CHAIN(impl);
   1111     }
   1112   }
  1114   // Recursively process each non-trivial sibling chain.
  1115   while (chaini > 0) {
  1116     Klass* chain = chains[--chaini];
  1117     for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
  1118       if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
  1119       if (is_participant(sub)) {
  1120         if (participants_hide_witnesses)  continue;
  1121         // else fall through to process this guy's subclasses
  1122       } else if (is_witness(sub) && !ignore_witness(sub)) {
   1123         return sub;
   1124       }
  1125       if (chaini < (VerifyDependencies? 2: CHAINMAX)) {
  1126         // Fast path.  (Partially disabled if VerifyDependencies.)
  1127         ADD_SUBCLASS_CHAIN(sub);
  1128       } else {
  1129         // Worklist overflow.  Do a recursive call.  Should be rare.
  1130         // The recursive call will have its own worklist, of course.
  1131         // (Note that sub has already been tested, so that there is
  1132         // no need for the recursive call to re-test.  That's handy,
  1133         // since the recursive call sees sub as the context_type.)
  1134         if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
  1135         Klass* witness = find_witness_anywhere(sub,
  1136                                                  participants_hide_witnesses,
  1137                                                  /*top_level_call=*/ false);
   1138         if (witness != NULL)  return witness;
   1139       }
   1140     }
   1141   }
  1143   // No witness found.  The dependency remains unbroken.
  1144   return NULL;
   1145 #undef ADD_SUBCLASS_CHAIN
   1146 }
  1149 bool Dependencies::is_concrete_klass(Klass* k) {
  1150   if (k->is_abstract())  return false;
  1151   // %%% We could treat classes which are concrete but
  1152   // have not yet been instantiated as virtually abstract.
  1153   // This would require a deoptimization barrier on first instantiation.
  1154   //if (k->is_not_instantiated())  return false;
   1155   return true;
   1156 }
  1158 bool Dependencies::is_concrete_method(Method* m) {
  1159   // Statics are irrelevant to virtual call sites.
  1160   if (m->is_static())  return false;
  1162   // We could also return false if m does not yet appear to be
  1163   // executed, if the VM version supports this distinction also.
  1164   return !m->is_abstract() &&
  1165          !InstanceKlass::cast(m->method_holder())->is_interface();
  1166          // TODO: investigate whether default methods should be
  1167          // considered as "concrete" in this situation.  For now they
   1168          // are not.
   1169 }
  1172 Klass* Dependencies::find_finalizable_subclass(Klass* k) {
  1173   if (k->is_interface())  return NULL;
  1174   if (k->has_finalizer()) return k;
  1175   k = k->subklass();
  1176   while (k != NULL) {
  1177     Klass* result = find_finalizable_subclass(k);
  1178     if (result != NULL) return result;
   1179     k = k->next_sibling();
   1180   }
   1181   return NULL;
   1182 }
  1185 bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
  1186   if (k->is_abstract())  return false;
  1187   // We could also return false if k does not yet appear to be
  1188   // instantiated, if the VM version supports this distinction also.
  1189   //if (k->is_not_instantiated())  return false;
   1190   return true;
   1191 }
  1193 bool Dependencies::is_concrete_method(ciMethod* m) {
  1194   // Statics are irrelevant to virtual call sites.
  1195   if (m->is_static())  return false;
  1197   // We could also return false if m does not yet appear to be
  1198   // executed, if the VM version supports this distinction also.
   1199   return !m->is_abstract();
   1200 }
  1203 bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
   1204   return k->has_finalizable_subclass();
   1205 }
  1208 // Any use of the contents (bytecodes) of a method must be
  1209 // marked by an "evol_method" dependency, if those contents
  1210 // can change.  (Note: A method is always dependent on itself.)
  1211 Klass* Dependencies::check_evol_method(Method* m) {
  1212   assert(must_be_in_vm(), "raw oops here");
  1213   // Did somebody do a JVMTI RedefineClasses while our backs were turned?
   1214   // Or is there now a breakpoint?
  1215   // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
  1216   if (m->is_old()
  1217       || m->number_of_breakpoints() > 0) {
  1218     return m->method_holder();
  1219   } else {
   1220     return NULL;
   1221   }
   1222 }
  1224 // This is a strong assertion:  It is that the given type
  1225 // has no subtypes whatever.  It is most useful for
  1226 // optimizing checks on reflected types or on array types.
  1227 // (Checks on types which are derived from real instances
  1228 // can be optimized more strongly than this, because we
  1229 // know that the checked type comes from a concrete type,
  1230 // and therefore we can disregard abstract types.)
  1231 Klass* Dependencies::check_leaf_type(Klass* ctxk) {
  1232   assert(must_be_in_vm(), "raw oops here");
  1233   assert_locked_or_safepoint(Compile_lock);
  1234   InstanceKlass* ctx = InstanceKlass::cast(ctxk);
  1235   Klass* sub = ctx->subklass();
  1236   if (sub != NULL) {
  1237     return sub;
  1238   } else if (ctx->nof_implementors() != 0) {
  1239     // if it is an interface, it must be unimplemented
  1240     // (if it is not an interface, nof_implementors is always zero)
  1241     Klass* impl = ctx->implementor();
  1242     assert(impl != NULL, "must be set");
  1243     return impl;
  1244   } else {
   1245     return NULL;
   1246   }
   1247 }
  1249 // Test the assertion that conck is the only concrete subtype* of ctxk.
   1250 // The type conck itself is allowed to have further concrete subtypes.
  1251 // This allows the compiler to narrow occurrences of ctxk by conck,
  1252 // when dealing with the types of actual instances.
  1253 Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
  1254                                                                    Klass* conck,
  1255                                                                    KlassDepChange* changes) {
  1256   ClassHierarchyWalker wf(conck);
   1257   return wf.find_witness_subtype(ctxk, changes);
   1258 }
  1260 // If a non-concrete class has no concrete subtypes, it is not (yet)
  1261 // instantiatable.  This can allow the compiler to make some paths go
  1262 // dead, if they are gated by a test of the type.
  1263 Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
  1264                                                                KlassDepChange* changes) {
  1265   // Find any concrete subtype, with no participants:
  1266   ClassHierarchyWalker wf;
   1267   return wf.find_witness_subtype(ctxk, changes);
   1268 }
  1271 // If a concrete class has no concrete subtypes, it can always be
  1272 // exactly typed.  This allows the use of a cheaper type test.
  1273 Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
  1274                                                                KlassDepChange* changes) {
  1275   // Find any concrete subtype, with only the ctxk as participant:
  1276   ClassHierarchyWalker wf(ctxk);
   1277   return wf.find_witness_subtype(ctxk, changes);
   1278 }
  1281 // Find the unique concrete proper subtype of ctxk, or NULL if there
  1282 // is more than one concrete proper subtype.  If there are no concrete
  1283 // proper subtypes, return ctxk itself, whether it is concrete or not.
   1284 // The returned subtype is allowed to have further concrete subtypes.
  1285 // That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
  1286 Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) {
  1287   ClassHierarchyWalker wf(ctxk);   // Ignore ctxk when walking.
  1288   wf.record_witnesses(1);          // Record one other witness when walking.
  1289   Klass* wit = wf.find_witness_subtype(ctxk);
  1290   if (wit != NULL)  return NULL;   // Too many witnesses.
  1291   Klass* conck = wf.participant(0);
  1292   if (conck == NULL) {
  1293 #ifndef PRODUCT
  1294     // Make sure the dependency mechanism will pass this discovery:
  1295     if (VerifyDependencies) {
  1296       // Turn off dependency tracing while actually testing deps.
  1297       FlagSetting fs(TraceDependencies, false);
  1298       if (!Dependencies::is_concrete_klass(ctxk)) {
  1299         guarantee(NULL ==
  1300                   (void *)check_abstract_with_no_concrete_subtype(ctxk),
  1301                   "verify dep.");
  1302       } else {
  1303         guarantee(NULL ==
  1304                   (void *)check_concrete_with_no_concrete_subtype(ctxk),
   1305                   "verify dep.");
   1306       }
   1307     }
  1308 #endif //PRODUCT
  1309     return ctxk;                   // Return ctxk as a flag for "no subtypes".
  1310   } else {
  1311 #ifndef PRODUCT
  1312     // Make sure the dependency mechanism will pass this discovery:
  1313     if (VerifyDependencies) {
  1314       // Turn off dependency tracing while actually testing deps.
  1315       FlagSetting fs(TraceDependencies, false);
  1316       if (!Dependencies::is_concrete_klass(ctxk)) {
  1317         guarantee(NULL == (void *)
  1318                   check_abstract_with_unique_concrete_subtype(ctxk, conck),
   1319                   "verify dep.");
   1320       }
   1321     }
  1322 #endif //PRODUCT
   1323     return conck;
   1324   }
   1325 }
  1327 // Test the assertion that the k[12] are the only concrete subtypes of ctxk,
  1328 // except possibly for further subtypes of k[12] themselves.
  1329 // The context type must be abstract.  The types k1 and k2 are themselves
  1330 // allowed to have further concrete subtypes.
  1331 Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes(
  1332                                                 Klass* ctxk,
  1333                                                 Klass* k1,
  1334                                                 Klass* k2,
  1335                                                 KlassDepChange* changes) {
  1336   ClassHierarchyWalker wf;
  1337   wf.add_participant(k1);
  1338   wf.add_participant(k2);
   1339   return wf.find_witness_subtype(ctxk, changes);
   1340 }
  1342 // Search ctxk for concrete implementations.  If there are klen or fewer,
  1343 // pack them into the given array and return the number.
  1344 // Otherwise, return -1, meaning the given array would overflow.
  1345 // (Note that a return of 0 means there are exactly no concrete subtypes.)
  1346 // In this search, if ctxk is concrete, it will be reported alone.
  1347 // For any type CC reported, no proper subtypes of CC will be reported.
  1348 int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk,
  1349                                                    int klen,
  1350                                                    Klass* karray[]) {
  1351   ClassHierarchyWalker wf;
  1352   wf.record_witnesses(klen);
  1353   Klass* wit = wf.find_witness_subtype(ctxk);
  1354   if (wit != NULL)  return -1;  // Too many witnesses.
  1355   int num = wf.num_participants();
  1356   assert(num <= klen, "oob");
  1357   // Pack the result array with the good news.
  1358   for (int i = 0; i < num; i++)
  1359     karray[i] = wf.participant(i);
  1360 #ifndef PRODUCT
  1361   // Make sure the dependency mechanism will pass this discovery:
  1362   if (VerifyDependencies) {
  1363     // Turn off dependency tracing while actually testing deps.
  1364     FlagSetting fs(TraceDependencies, false);
  1365     switch (Dependencies::is_concrete_klass(ctxk)? -1: num) {
  1366     case -1: // ctxk was itself concrete
  1367       guarantee(num == 1 && karray[0] == ctxk, "verify dep.");
  1368       break;
  1369     case 0:
  1370       guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk),
  1371                 "verify dep.");
  1372       break;
  1373     case 1:
  1374       guarantee(NULL == (void *)
  1375                 check_abstract_with_unique_concrete_subtype(ctxk, karray[0]),
  1376                 "verify dep.");
  1377       break;
  1378     case 2:
  1379       guarantee(NULL == (void *)
  1380                 check_abstract_with_exclusive_concrete_subtypes(ctxk,
  1381                                                                 karray[0],
  1382                                                                 karray[1]),
  1383                 "verify dep.");
  1384       break;
  1385     default:
  1386       ShouldNotReachHere();  // klen > 2 not yet supported
  1387     }
  1388   }
  1389 #endif //PRODUCT
  1390   return num;
  1391 }
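       // For illustration (hypothetical hierarchy): with abstract A whose only
       // concrete subtypes are C1 and C2, find_exclusive_concrete_subtypes(A, 2,
       // karray) fills karray[0..1] with C1 and C2 and returns 2; a third
       // concrete subtype would make it return -1 rather than overflow karray.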
  1393 // If a class (or interface) has a unique concrete method uniqm, return NULL.
  1394 // Otherwise, return a class that contains an interfering method.
  1395 Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm,
  1396                                                     KlassDepChange* changes) {
  1397   // Here is a missing optimization:  If uniqm->is_final(),
  1398   // we don't really need to search beneath it for overrides.
  1399   // This is probably not important, since we don't use dependencies
  1400   // to track final methods.  (They can't be "definalized".)
  1401   ClassHierarchyWalker wf(uniqm->method_holder(), uniqm);
  1402   return wf.find_witness_definer(ctxk, changes);
  1403 }
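       // For illustration (hypothetical types): if uniqm is A::m and a concrete
       // class B <: ctxk that overrides m is loaded, the walk above returns B as
       // the witness; while A::m remains the only concrete implementation under
       // ctxk, the check returns NULL and the dependency still holds.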
  1405 // Find the set of all non-abstract methods under ctxk that match m.
  1406 // (The method m must be defined or inherited in ctxk.)
  1407 // Include m itself in the set, unless it is abstract.
  1408 // If this set has exactly one element, return that element.
  1409 Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) {
  1410   ClassHierarchyWalker wf(m);
  1411   assert(wf.check_method_context(ctxk, m), "proper context");
  1412   wf.record_witnesses(1);
  1413   Klass* wit = wf.find_witness_definer(ctxk);
  1414   if (wit != NULL)  return NULL;  // Too many witnesses.
  1415   Method* fm = wf.found_method(0);  // Will be NULL if num_parts == 0.
  1416   if (Dependencies::is_concrete_method(m)) {
  1417     if (fm == NULL) {
  1418       // It turns out that m was always the only implementation.
  1419       fm = m;
  1420     } else if (fm != m) {
  1421       // Two conflicting implementations after all.
  1422       // (This can happen if m is inherited into ctxk and fm overrides it.)
  1423       return NULL;
  1424     }
  1425   }
  1426 #ifndef PRODUCT
  1427   // Make sure the dependency mechanism will pass this discovery:
  1428   if (VerifyDependencies && fm != NULL) {
  1429     guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
  1430               "verify dep.");
  1431   }
  1432 #endif //PRODUCT
  1433   return fm;
  1434 }
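       // For illustration (hypothetical types): if abstract A declares m and a
       // single concrete class B <: A implements it, the search returns B::m;
       // if a second concrete implementation exists anywhere under A, or m is
       // concrete and some subclass overrides it, the result is NULL.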
  1436 Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk,
  1437                                                         Method* m1,
  1438                                                         Method* m2,
  1439                                                         KlassDepChange* changes) {
  1440   ClassHierarchyWalker wf(m1);
  1441   wf.add_participant(m1->method_holder());
  1442   wf.add_participant(m2->method_holder());
  1443   return wf.find_witness_definer(ctxk, changes);
  1444 }
  1446 // Find the set of all non-abstract methods under ctxk that match marray[0].
  1447 // (The method marray[0] must be defined or inherited in ctxk.)
  1448 // Include marray[0] itself in the set, unless it is abstract.
  1449 // Fill the given array marray[0..(mlen-1)] with this set, and return the length.
  1450 // (The length may be zero if no concrete methods are found anywhere.)
  1451 // If there are too many concrete methods to fit in marray, return -1.
  1452 int Dependencies::find_exclusive_concrete_methods(Klass* ctxk,
  1453                                                   int mlen,
  1454                                                   Method* marray[]) {
  1455   Method* m0 = marray[0];
  1456   ClassHierarchyWalker wf(m0);
  1457   assert(wf.check_method_context(ctxk, m0), "proper context");
  1458   wf.record_witnesses(mlen);
  1459   bool participants_hide_witnesses = true;
  1460   Klass* wit = wf.find_witness_definer(ctxk);
  1461   if (wit != NULL)  return -1;  // Too many witnesses.
  1462   int num = wf.num_participants();
  1463   assert(num <= mlen, "oob");
  1464   // Keep track of whether m is also part of the result set.
  1465   int mfill = 0;
  1466   assert(marray[mfill] == m0, "sanity");
  1467   if (Dependencies::is_concrete_method(m0))
  1468     mfill++;  // keep m0 as marray[0], the first result
  1469   for (int i = 0; i < num; i++) {
  1470     Method* fm = wf.found_method(i);
  1471     if (fm == m0)  continue;  // Already put this guy in the list.
  1472     if (mfill == mlen) {
  1473       return -1;              // Oops.  Too many methods after all!
  1474     }
  1475     marray[mfill++] = fm;
  1476   }
  1477 #ifndef PRODUCT
  1478   // Make sure the dependency mechanism will pass this discovery:
  1479   if (VerifyDependencies) {
  1480     // Turn off dependency tracing while actually testing deps.
  1481     FlagSetting fs(TraceDependencies, false);
  1482     switch (mfill) {
  1483     case 1:
  1484       guarantee(NULL == (void *)check_unique_concrete_method(ctxk, marray[0]),
  1485                 "verify dep.");
  1486       break;
  1487     case 2:
  1488       guarantee(NULL == (void *)
  1489                 check_exclusive_concrete_methods(ctxk, marray[0], marray[1]),
  1490                 "verify dep.");
  1491       break;
  1492     default:
  1493       ShouldNotReachHere();  // mlen > 2 not yet supported
  1494     }
  1495   }
  1496 #endif //PRODUCT
  1497   return mfill;
  1498 }
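       // For illustration (hypothetical types): with marray[0] = A::m (abstract)
       // and mlen = 2, if the only concrete implementations under A are B::m and
       // C::m, the call returns 2 with marray = { B::m, C::m }; a third concrete
       // implementation would make it return -1.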
  1501 Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) {
  1502   Klass* search_at = ctxk;
  1503   if (changes != NULL)
  1504     search_at = changes->new_type(); // just look at the new bit
  1505   return find_finalizable_subclass(search_at);
  1506 }
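       // Note: when validating against a specific class load (changes != NULL),
       // only the newly loaded type can introduce a finalizer, so the search is
       // narrowed to that type instead of rescanning everything under ctxk.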
  1509 Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
  1510   assert(call_site    ->is_a(SystemDictionary::CallSite_klass()),     "sanity");
  1511   assert(method_handle->is_a(SystemDictionary::MethodHandle_klass()), "sanity");
  1512   if (changes == NULL) {
  1513     // Validate all CallSites
  1514     if (java_lang_invoke_CallSite::target(call_site) != method_handle)
  1515       return call_site->klass();  // assertion failed
  1516   } else {
  1517     // Validate the given CallSite
  1518     if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
  1519       assert(method_handle != changes->method_handle(), "must be");
  1520       return call_site->klass();  // assertion failed
  1521     }
  1522   }
  1523   return NULL;  // assertion still valid
  1524 }
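       // For illustration: a compiled call that inlined through a CallSite's
       // target records (call_site, method_handle) as a dependency; if the
       // site's target is later changed to a different MethodHandle, the check
       // above returns the CallSite's klass as a witness that the dependency
       // no longer holds.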
  1527 void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
  1528   if (witness != NULL) {
  1529     if (TraceDependencies) {
  1530       print_dependency(witness, /*verbose=*/ true);
  1531     }
  1532     // The following is a no-op unless logging is enabled:
  1533     log_dependency(witness);
  1534   }
  1535 }
  1538 Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
  1539   assert_locked_or_safepoint(Compile_lock);
  1540   Dependencies::check_valid_dependency_type(type());
  1542   Klass* witness = NULL;
  1543   switch (type()) {
  1544   case evol_method:
  1545     witness = check_evol_method(method_argument(0));
  1546     break;
  1547   case leaf_type:
  1548     witness = check_leaf_type(context_type());
  1549     break;
  1550   case abstract_with_unique_concrete_subtype:
  1551     witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
  1552     break;
  1553   case abstract_with_no_concrete_subtype:
  1554     witness = check_abstract_with_no_concrete_subtype(context_type(), changes);
  1555     break;
  1556   case concrete_with_no_concrete_subtype:
  1557     witness = check_concrete_with_no_concrete_subtype(context_type(), changes);
  1558     break;
  1559   case unique_concrete_method:
  1560     witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
  1561     break;
  1562   case abstract_with_exclusive_concrete_subtypes_2:
  1563     witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes);
  1564     break;
  1565   case exclusive_concrete_methods_2:
  1566     witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes);
  1567     break;
  1568   case no_finalizable_subclasses:
  1569     witness = check_has_no_finalizable_subclasses(context_type(), changes);
  1570     break;
  1571   default:
  1572     witness = NULL;
  1573     break;
  1574   }
  1575   trace_and_log_witness(witness);
  1576   return witness;
  1577 }
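       // Note: dependency types not listed above (e.g. call_site_target_value)
       // fall through to the default arm and report no witness here; they are
       // handled by check_call_site_dependency() below.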
  1580 Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
  1581   assert_locked_or_safepoint(Compile_lock);
  1582   Dependencies::check_valid_dependency_type(type());
  1584   Klass* witness = NULL;
  1585   switch (type()) {
  1586   case call_site_target_value:
  1587     witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
  1588     break;
  1589   default:
  1590     witness = NULL;
  1591     break;
  1592   }
  1593   trace_and_log_witness(witness);
  1594   return witness;
  1595 }
  1598 Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
  1599   // Handle klass dependency
  1600   if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
  1601     return check_klass_dependency(changes.as_klass_change());
  1603   // Handle CallSite dependency
  1604   if (changes.is_call_site_change())
  1605     return check_call_site_dependency(changes.as_call_site_change());
  1607   // irrelevant dependency; skip it
  1608   return NULL;
  1609 }
  1612 void DepChange::print() {
  1613   int nsup = 0, nint = 0;
  1614   for (ContextStream str(*this); str.next(); ) {
  1615     Klass* k = str.klass();
  1616     switch (str.change_type()) {
  1617     case Change_new_type:
  1618       tty->print_cr("  dependee = %s", InstanceKlass::cast(k)->external_name());
  1619       break;
  1620     case Change_new_sub:
  1621       if (!WizardMode) {
  1622         ++nsup;
  1623       } else {
  1624         tty->print_cr("  context super = %s", InstanceKlass::cast(k)->external_name());
  1625       }
  1626       break;
  1627     case Change_new_impl:
  1628       if (!WizardMode) {
  1629         ++nint;
  1630       } else {
  1631         tty->print_cr("  context interface = %s", InstanceKlass::cast(k)->external_name());
  1632       }
  1633       break;
  1634     }
  1635   }
  1636   if (nsup + nint != 0) {
  1637     tty->print_cr("  context supers = %d, interfaces = %d", nsup, nint);
  1638   }
  1639 }
  1641 void DepChange::ContextStream::start() {
  1642   Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL;
  1643   _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass);
  1644   _klass = new_type;
  1645   _ti_base = NULL;
  1646   _ti_index = 0;
  1647   _ti_limit = 0;
  1648 }
  1650 bool DepChange::ContextStream::next() {
  1651   switch (_change_type) {
  1652   case Start_Klass:             // initial state; _klass is the new type
  1653     _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
  1654     _ti_index = 0;
  1655     _change_type = Change_new_type;
  1656     return true;
  1657   case Change_new_type:
  1658     // fall through:
  1659     _change_type = Change_new_sub;
  1660   case Change_new_sub:
  1661     // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
  1662     {
  1663       _klass = InstanceKlass::cast(_klass)->super();
  1664       if (_klass != NULL) {
  1665         return true;
  1666       }
  1667     }
  1668     // else set up _ti_limit and fall through:
  1669     _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
  1670     _change_type = Change_new_impl;
  1671   case Change_new_impl:
  1672     if (_ti_index < _ti_limit) {
  1673       _klass = _ti_base->at(_ti_index++);
  1674       return true;
  1675     }
  1676     // fall through:
  1677     _change_type = NO_CHANGE;  // iterator is exhausted
  1678   case NO_CHANGE:
  1679     break;
  1680   default:
  1681     ShouldNotReachHere();
  1682   }
  1683   return false;
  1684 }
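       // For illustration: for a newly loaded class C extending B (which extends
       // java.lang.Object) and implementing I, the stream yields C itself, then
       // B and java.lang.Object as its supers, then I and any other transitive
       // interfaces, and finally reports exhaustion.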
  1686 void KlassDepChange::initialize() {
  1687   // entire transaction must be under this lock:
  1688   assert_lock_strong(Compile_lock);
  1690   // Mark the dependee and all its superclasses,
  1691   // as well as its transitive interfaces.
  1692   for (ContextStream str(*this); str.next(); ) {
  1693     Klass* d = str.klass();
  1694     assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
  1695     InstanceKlass::cast(d)->set_is_marked_dependent(true);
  1696   }
  1697 }
  1699 KlassDepChange::~KlassDepChange() {
  1700   // Unmark the dependee and all its superclasses,
  1701   // as well as its transitive interfaces.
  1702   for (ContextStream str(*this); str.next(); ) {
  1703     Klass* d = str.klass();
  1704     InstanceKlass::cast(d)->set_is_marked_dependent(false);
  1705   }
  1706 }
  1708 bool KlassDepChange::involves_context(Klass* k) {
  1709   if (k == NULL || !k->oop_is_instance()) {
  1710     return false;
  1711   }
  1712   InstanceKlass* ik = InstanceKlass::cast(k);
  1713   bool is_contained = ik->is_marked_dependent();
  1714   assert(is_contained == new_type()->is_subtype_of(k),
  1715          "correct marking of potential context types");
  1716   return is_contained;
  1717 }
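       // Note: the mark bit set by KlassDepChange::initialize() lets this query
       // answer "is k a supertype of the new type?" in constant time while
       // dependencies are rechecked; the assert cross-checks the mark against an
       // actual subtype test.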
  1719 #ifndef PRODUCT
  1720 void Dependencies::print_statistics() {
  1721   if (deps_find_witness_print != 0) {
  1722     // Call one final time, to flush out the data.
  1723     deps_find_witness_print = -1;
  1724     count_find_witness_calls();
  1725   }
  1726 }
  1727 #endif
