src/share/vm/opto/bytecodeInfo.cpp

author       roland
date         Wed, 03 Jun 2015 14:22:57 +0200
changeset    7859:c1c199dde5c9
parent       7854:e8260b6328fb
child        7994:04ff2f6cd0eb
child        9858:b985cbb00e68
permissions  -rw-r--r--

8077504: Unsafe load can lose control dependency and cause crash
Summary: Node::depends_only_on_test() should return false for Unsafe loads
Reviewed-by: kvn, adinn
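
As background for the Summary line above, here is a minimal standalone C++ sketch of the idea (a toy model only, not part of this changeset; ToyNode and can_hoist_above_control are invented names): a node that depends only on the outcome of its guarding test may be hoisted to an equivalent dominating test, but an unsafe (raw-address) load can fault if it executes before its guard, so its depends_only_on_test() must report false.

// Toy model only -- not HotSpot code; ToyNode and can_hoist_above_control
// are invented names used purely for illustration.
#include <cstdio>

struct ToyNode {
  bool is_unsafe_load;
  explicit ToyNode(bool unsafe_load) : is_unsafe_load(unsafe_load) {}
  // A node whose only use of its control input is the branch outcome may be
  // hoisted to an equivalent dominating test.  An unsafe (raw-address) load
  // can fault if it executes before its guard, so it must answer false.
  bool depends_only_on_test() const { return !is_unsafe_load; }
};

// Simplified hoisting decision used by an imaginary optimizer.
bool can_hoist_above_control(const ToyNode& n) {
  return n.depends_only_on_test();
}

int main() {
  ToyNode regular_load(false);
  ToyNode unsafe_load(true);
  std::printf("regular load hoistable: %d\n", can_hoist_above_control(regular_load));  // 1
  std::printf("unsafe load hoistable:  %d\n", can_hoist_above_control(unsafe_load));   // 0
  return 0;
}

The actual fix lives in the IR node classes named in the Summary, not in this file, which only handles inlining decisions.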

duke@435 1 /*
mikael@6198 2 * Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
minqi@4267 26 #include "ci/ciReplay.hpp"
stefank@2314 27 #include "classfile/systemDictionary.hpp"
stefank@2314 28 #include "classfile/vmSymbols.hpp"
twisti@2687 29 #include "compiler/compileBroker.hpp"
stefank@2314 30 #include "compiler/compileLog.hpp"
stefank@2314 31 #include "interpreter/linkResolver.hpp"
stefank@2314 32 #include "oops/objArrayKlass.hpp"
stefank@2314 33 #include "opto/callGenerator.hpp"
stefank@2314 34 #include "opto/parse.hpp"
stefank@2314 35 #include "runtime/handles.inline.hpp"
duke@435 36
duke@435 37 //=============================================================================
duke@435 38 //------------------------------InlineTree-------------------------------------
never@2981 39 InlineTree::InlineTree(Compile* c,
never@2981 40 const InlineTree *caller_tree, ciMethod* callee,
never@2981 41 JVMState* caller_jvms, int caller_bci,
never@2981 42 float site_invoke_ratio, int max_inline_level) :
never@2981 43 C(c),
never@2981 44 _caller_jvms(caller_jvms),
never@2981 45 _caller_tree((InlineTree*) caller_tree),
never@2981 46 _method(callee),
never@2981 47 _site_invoke_ratio(site_invoke_ratio),
never@2981 48 _max_inline_level(max_inline_level),
roland@4409 49 _count_inline_bcs(method()->code_size_for_inlining()),
iignatyev@4660 50 _subtrees(c->comp_arena(), 2, 0, NULL),
iignatyev@4660 51 _msg(NULL)
jrose@1592 52 {
kvn@6217 53 #ifndef PRODUCT
kvn@6217 54 _count_inlines = 0;
kvn@6217 55 _forced_inline = false;
kvn@6217 56 #endif
duke@435 57 if (_caller_jvms != NULL) {
duke@435 58 // Keep a private copy of the caller_jvms:
duke@435 59 _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
duke@435 60 _caller_jvms->set_bci(caller_jvms->bci());
cfang@1335 61 assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
duke@435 62 }
duke@435 63 assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
jrose@1592 64 assert((caller_tree == NULL ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
duke@435 65 assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
shade@6314 66 // Update hierarchical counts, count_inline_bcs() and count_inlines()
shade@6314 67 InlineTree *caller = (InlineTree *)caller_tree;
shade@6314 68 for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) {
shade@6314 69 caller->_count_inline_bcs += count_inline_bcs();
shade@6314 70 NOT_PRODUCT(caller->_count_inlines++;)
duke@435 71 }
duke@435 72 }
duke@435 73
kvn@5113 74 /**
kvn@5113 75 * Return true when EA is ON and a java constructor is called or
kvn@5113 76 * a super constructor is called from an inlined java constructor.
kvn@5113 77 * Also return true for boxing methods.
kvn@5113 78 */
kvn@476 79 static bool is_init_with_ea(ciMethod* callee_method,
kvn@476 80 ciMethod* caller_method, Compile* C) {
kvn@5113 81 if (!C->do_escape_analysis() || !EliminateAllocations) {
kvn@5113 82 return false; // EA is off
kvn@5113 83 }
kvn@5113 84 if (callee_method->is_initializer()) {
kvn@5113 85 return true; // constructor
kvn@5113 86 }
kvn@5113 87 if (caller_method->is_initializer() &&
kvn@5113 88 caller_method != C->method() &&
kvn@5113 89 caller_method->holder()->is_subclass_of(callee_method->holder())) {
kvn@5113 90 return true; // super constructor is called from inlined constructor
kvn@5113 91 }
kvn@5113 92 if (C->eliminate_boxing() && callee_method->is_boxing_method()) {
kvn@5113 93 return true;
kvn@5113 94 }
kvn@5113 95 return false;
kvn@476 96 }
kvn@476 97
kvn@5113 98 /**
kvn@5113 99 * Force inlining unboxing accessor.
kvn@5113 100 */
kvn@5110 101 static bool is_unboxing_method(ciMethod* callee_method, Compile* C) {
kvn@5110 102 return C->eliminate_boxing() && callee_method->is_unboxing_method();
kvn@5110 103 }
kvn@5110 104
iignatyev@4660 105 // positive filter: should callee be inlined?
iignatyev@4660 106 bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method,
iignatyev@4660 107 int caller_bci, ciCallProfile& profile,
iignatyev@4660 108 WarmCallInfo* wci_result) {
duke@435 109 // Allows targeted inlining
vlivanov@7182 110 if (callee_method->should_inline()) {
duke@435 111 *wci_result = *(WarmCallInfo::always_hot());
kvn@5763 112 if (C->print_inlining() && Verbose) {
never@2981 113 CompileTask::print_inline_indent(inline_level());
duke@435 114 tty->print_cr("Inlined method is hot: ");
duke@435 115 }
iignatyev@4660 116 set_msg("force inline by CompilerOracle");
kvn@6217 117 _forced_inline = true;
iignatyev@4660 118 return true;
duke@435 119 }
duke@435 120
vlivanov@7182 121 if (callee_method->force_inline()) {
vlivanov@7182 122 set_msg("force inline by annotation");
vlivanov@7182 123 _forced_inline = true;
vlivanov@7182 124 return true;
vlivanov@7182 125 }
vlivanov@7182 126
kvn@6217 127 #ifndef PRODUCT
kvn@6217 128 int inline_depth = inline_level()+1;
kvn@6217 129 if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
kvn@6217 130 set_msg("force inline by ciReplay");
kvn@6217 131 _forced_inline = true;
kvn@6217 132 return true;
kvn@6217 133 }
kvn@6217 134 #endif
kvn@6217 135
twisti@3097 136 int size = callee_method->code_size_for_inlining();
duke@435 137
duke@435 138 // Check for too many throws (and not too huge)
kvn@476 139 if(callee_method->interpreter_throwout_count() > InlineThrowCount &&
kvn@476 140 size < InlineThrowMaxSize ) {
duke@435 141 wci_result->set_profit(wci_result->profit() * 100);
kvn@5763 142 if (C->print_inlining() && Verbose) {
never@2981 143 CompileTask::print_inline_indent(inline_level());
duke@435 144 tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
duke@435 145 }
iignatyev@4660 146 set_msg("many throws");
iignatyev@4660 147 return true;
duke@435 148 }
duke@435 149
twisti@2898 150 int default_max_inline_size = C->max_inline_size();
twisti@2898 151 int inline_small_code_size = InlineSmallCode / 4;
twisti@2898 152 int max_inline_size = default_max_inline_size;
twisti@2898 153
duke@435 154 int call_site_count = method()->scale_count(profile.count());
duke@435 155 int invoke_count = method()->interpreter_invocation_count();
twisti@2898 156
twisti@2898 157 assert(invoke_count != 0, "require invocation count greater than zero");
twisti@2898 158 int freq = call_site_count / invoke_count;
kvn@476 159
duke@435 160 // bump the max size if the call is frequent
kvn@476 161 if ((freq >= InlineFrequencyRatio) ||
kvn@476 162 (call_site_count >= InlineFrequencyCount) ||
kvn@5110 163 is_unboxing_method(callee_method, C) ||
kvn@476 164 is_init_with_ea(callee_method, caller_method, C)) {
kvn@476 165
twisti@2898 166 max_inline_size = C->freq_inline_size();
twisti@2898 167 if (size <= max_inline_size && TraceFrequencyInlining) {
never@2981 168 CompileTask::print_inline_indent(inline_level());
duke@435 169 tty->print_cr("Inlined frequent method (freq=%d count=%d):", freq, call_site_count);
never@2981 170 CompileTask::print_inline_indent(inline_level());
duke@435 171 callee_method->print();
duke@435 172 tty->cr();
duke@435 173 }
duke@435 174 } else {
duke@435 175 // Not hot. Check for medium-sized pre-existing nmethod at cold sites.
kvn@476 176 if (callee_method->has_compiled_code() &&
kvn@4772 177 callee_method->instructions_size() > inline_small_code_size) {
iignatyev@4660 178 set_msg("already compiled into a medium method");
iignatyev@4660 179 return false;
kvn@4772 180 }
duke@435 181 }
twisti@2898 182 if (size > max_inline_size) {
iignatyev@4660 183 if (max_inline_size > default_max_inline_size) {
iignatyev@4660 184 set_msg("hot method too big");
iignatyev@4660 185 } else {
iignatyev@4660 186 set_msg("too big");
iignatyev@4660 187 }
iignatyev@4660 188 return false;
duke@435 189 }
iignatyev@4660 190 return true;
duke@435 191 }
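
The frequency-based budget bump in should_inline() above can be summarized by the following standalone sketch. This is toy code with invented stand-in values; the real limits come from C->max_inline_size(), C->freq_inline_size(), InlineFrequencyRatio and InlineFrequencyCount, whose defaults vary by platform and release.

// Standalone sketch only -- not HotSpot code.  The numeric limits are
// invented stand-ins for the flags named above.
#include <cstdio>

struct ToyInlineLimits {
  int max_inline_size  = 35;    // default budget (stand-in value)
  int freq_inline_size = 325;   // bumped budget for hot sites (stand-in value)
  int frequency_ratio  = 20;    // stand-in for InlineFrequencyRatio
  int frequency_count  = 100;   // stand-in for InlineFrequencyCount
};

// Size budget for a callee whose call site was hit call_site_count times
// while the caller was interpreted invoke_count times (must be non-zero,
// as the assert in should_inline() requires).
int size_budget(const ToyInlineLimits& l, int call_site_count, int invoke_count) {
  int freq = call_site_count / invoke_count;   // same integer ratio as above
  bool frequent = (freq >= l.frequency_ratio) ||
                  (call_site_count >= l.frequency_count);
  return frequent ? l.freq_inline_size : l.max_inline_size;
}

int main() {
  ToyInlineLimits limits;
  std::printf("cold site budget: %d\n", size_budget(limits, 5, 10));     // 35
  std::printf("hot site budget:  %d\n", size_budget(limits, 5000, 10));  // 325
  return 0;
}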
duke@435 192
duke@435 193
iignatyev@4660 194 // negative filter: should callee NOT be inlined?
iignatyev@4660 195 bool InlineTree::should_not_inline(ciMethod *callee_method,
iignatyev@4660 196 ciMethod* caller_method,
twisti@5901 197 JVMState* jvms,
iignatyev@4660 198 WarmCallInfo* wci_result) {
iignatyev@4660 199
iignatyev@4660 200 const char* fail_msg = NULL;
iignatyev@4660 201
iignatyev@4660 202 // First check all inlining restrictions which are required for correctness
iignatyev@4660 203 if ( callee_method->is_abstract()) {
iignatyev@4660 204 fail_msg = "abstract method"; // note: we allow ik->is_abstract()
iignatyev@4660 205 } else if (!callee_method->holder()->is_initialized()) {
iignatyev@4660 206 fail_msg = "method holder not initialized";
iignatyev@4660 207 } else if ( callee_method->is_native()) {
iignatyev@4660 208 fail_msg = "native method";
iignatyev@4660 209 } else if ( callee_method->dont_inline()) {
iignatyev@4660 210 fail_msg = "don't inline by annotation";
iignatyev@4660 211 }
iignatyev@4660 212
iignatyev@4660 213 // one more inlining restriction
iignatyev@4660 214 if (fail_msg == NULL && callee_method->has_unloaded_classes_in_signature()) {
iignatyev@4660 215 fail_msg = "unloaded signature classes";
iignatyev@4660 216 }
twisti@1573 217
iignatyev@4660 218 if (fail_msg != NULL) {
iignatyev@4660 219 set_msg(fail_msg);
iignatyev@4660 220 return true;
iignatyev@4660 221 }
iignatyev@4660 222
iignatyev@4660 223 // ignore heuristic controls on inlining
roland@4409 224 if (callee_method->should_inline()) {
iignatyev@4660 225 set_msg("force inline by CompilerOracle");
iignatyev@4660 226 return false;
duke@435 227 }
duke@435 228
kvn@5110 229 if (callee_method->should_not_inline()) {
kvn@5110 230 set_msg("disallowed by CompilerOracle");
kvn@5110 231 return true;
kvn@5110 232 }
kvn@5110 233
kvn@5110 234 #ifndef PRODUCT
kvn@6217 235 int caller_bci = jvms->bci();
kvn@6217 236 int inline_depth = inline_level()+1;
kvn@6217 237 if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
kvn@6217 238 set_msg("force inline by ciReplay");
kvn@6217 239 return false;
kvn@6217 240 }
kvn@6217 241
kvn@6217 242 if (ciReplay::should_not_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
kvn@6217 243 set_msg("disallowed by ciReplay");
kvn@6217 244 return true;
kvn@6217 245 }
kvn@6217 246
kvn@5110 247 if (ciReplay::should_not_inline(callee_method)) {
kvn@5110 248 set_msg("disallowed by ciReplay");
kvn@5110 249 return true;
kvn@5110 250 }
kvn@5110 251 #endif
kvn@5110 252
vlivanov@7182 253 if (callee_method->force_inline()) {
vlivanov@7182 254 set_msg("force inline by annotation");
vlivanov@7182 255 return false;
vlivanov@7182 256 }
vlivanov@7182 257
duke@435 258 // Now perform checks which are heuristic
duke@435 259
kvn@5110 260 if (is_unboxing_method(callee_method, C)) {
kvn@5110 261 // Inline unboxing methods.
kvn@5110 262 return false;
kvn@5110 263 }
kvn@5110 264
vlivanov@7182 265 if (callee_method->has_compiled_code() &&
vlivanov@7182 266 callee_method->instructions_size() > InlineSmallCode) {
vlivanov@7182 267 set_msg("already compiled into a big method");
vlivanov@7182 268 return true;
twisti@3969 269 }
duke@435 270
duke@435 271 // don't inline exception code unless the top method belongs to an
duke@435 272 // exception class
duke@435 273 if (caller_tree() != NULL &&
duke@435 274 callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
duke@435 275 const InlineTree *top = this;
duke@435 276 while (top->caller_tree() != NULL) top = top->caller_tree();
duke@435 277 ciInstanceKlass* k = top->method()->holder();
iignatyev@4660 278 if (!k->is_subclass_of(C->env()->Throwable_klass())) {
iignatyev@4660 279 set_msg("exception method");
iignatyev@4660 280 return true;
iignatyev@4660 281 }
duke@435 282 }
duke@435 283
duke@435 284 // use frequency-based objections only for non-trivial methods
iignatyev@4660 285 if (callee_method->code_size() <= MaxTrivialSize) {
iignatyev@4660 286 return false;
iignatyev@4660 287 }
kvn@476 288
kvn@476 289 // don't use counts with -Xcomp or CTW
kvn@476 290 if (UseInterpreter && !CompileTheWorld) {
kvn@476 291
kvn@476 292 if (!callee_method->has_compiled_code() &&
kvn@476 293 !callee_method->was_executed_more_than(0)) {
iignatyev@4660 294 set_msg("never executed");
iignatyev@4660 295 return true;
kvn@476 296 }
kvn@476 297
kvn@476 298 if (is_init_with_ea(callee_method, caller_method, C)) {
kvn@476 299 // Escape Analysis: inline all executed constructors
kvn@5110 300 return false;
kvn@476 301 } else if (!callee_method->was_executed_more_than(MIN2(MinInliningThreshold,
kvn@476 302 CompileThreshold >> 1))) {
iignatyev@4660 303 set_msg("executed < MinInliningThreshold times");
iignatyev@4660 304 return true;
kvn@476 305 }
duke@435 306 }
duke@435 307
iignatyev@4660 308 return false;
duke@435 309 }
duke@435 310
duke@435 311 //-----------------------------try_to_inline-----------------------------------
iignatyev@4660 312 // return true if ok
duke@435 313 // Relocated from "InliningClosure::try_to_inline"
iignatyev@4660 314 bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
twisti@5901 315 int caller_bci, JVMState* jvms, ciCallProfile& profile,
iignatyev@4660 316 WarmCallInfo* wci_result, bool& should_delay) {
iignatyev@4660 317
shade@6314 318 if (ClipInlining && (int)count_inline_bcs() >= DesiredMethodLimit) {
roland@4409 319 if (!callee_method->force_inline() || !IncrementalInline) {
iignatyev@4660 320 set_msg("size > DesiredMethodLimit");
iignatyev@4660 321 return false;
roland@4409 322 } else if (!C->inlining_incrementally()) {
roland@4409 323 should_delay = true;
roland@4409 324 }
duke@435 325 }
duke@435 326
kvn@6217 327 _forced_inline = false; // Reset
iignatyev@4660 328 if (!should_inline(callee_method, caller_method, caller_bci, profile,
iignatyev@4660 329 wci_result)) {
iignatyev@4660 330 return false;
iignatyev@4660 331 }
twisti@5901 332 if (should_not_inline(callee_method, caller_method, jvms, wci_result)) {
iignatyev@4660 333 return false;
iignatyev@4660 334 }
duke@435 335
jrose@1592 336 if (InlineAccessors && callee_method->is_accessor()) {
jrose@1592 337 // accessor methods are not subject to any of the following limits.
iignatyev@4660 338 set_msg("accessor");
iignatyev@4660 339 return true;
jrose@1592 340 }
duke@435 341
duke@435 342 // suppress a few checks for accessors and trivial methods
twisti@3969 343 if (callee_method->code_size() > MaxTrivialSize) {
kvn@476 344
duke@435 345 // don't inline into giant methods
roland@4409 346 if (C->over_inlining_cutoff()) {
roland@4409 347 if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
roland@4409 348 || !IncrementalInline) {
iignatyev@4660 349 set_msg("NodeCountInliningCutoff");
iignatyev@4660 350 return false;
roland@4409 351 } else {
roland@4409 352 should_delay = true;
roland@4409 353 }
kvn@476 354 }
duke@435 355
kvn@476 356 if ((!UseInterpreter || CompileTheWorld) &&
kvn@476 357 is_init_with_ea(callee_method, caller_method, C)) {
kvn@476 358 // Escape Analysis stress testing when running Xcomp or CTW:
kvn@476 359 // inline constructors even if they are not reached.
kvn@6217 360 } else if (forced_inline()) {
vlivanov@7182 361 // Inlining was forced by CompilerOracle, ciReplay or annotation
kvn@476 362 } else if (profile.count() == 0) {
kvn@476 363 // don't inline unreached call sites
iignatyev@4660 364 set_msg("call site not reached");
iignatyev@4660 365 return false;
kvn@476 366 }
duke@435 367 }
duke@435 368
jrose@1592 369 if (!C->do_inlining() && InlineAccessors) {
iignatyev@4660 370 set_msg("not an accessor");
iignatyev@4660 371 return false;
kvn@476 372 }
roland@6709 373
roland@6709 374 // Limit inlining depth in case inlining is forced or
roland@6709 375 // _max_inline_level was increased to compensate for lambda forms.
roland@6709 376 if (inline_level() > MaxForceInlineLevel) {
roland@6709 377 set_msg("MaxForceInlineLevel");
roland@6709 378 return false;
roland@6709 379 }
never@2981 380 if (inline_level() > _max_inline_level) {
roland@4409 381 if (!callee_method->force_inline() || !IncrementalInline) {
iignatyev@4660 382 set_msg("inlining too deep");
iignatyev@4660 383 return false;
roland@4409 384 } else if (!C->inlining_incrementally()) {
roland@4409 385 should_delay = true;
roland@4409 386 }
kvn@476 387 }
twisti@2687 388
twisti@2866 389 // detect direct and indirect recursive inlining
twisti@5901 390 {
twisti@2866 391 // count the current method and the callee
twisti@5901 392 const bool is_compiled_lambda_form = callee_method->is_compiled_lambda_form();
twisti@5901 393 int inline_level = 0;
twisti@5901 394 if (!is_compiled_lambda_form) {
twisti@5901 395 if (method() == callee_method) {
twisti@5901 396 inline_level++;
twisti@5901 397 }
iignatyev@4660 398 }
twisti@2866 399 // count callers of current method and callee
twisti@5901 400 Node* callee_argument0 = is_compiled_lambda_form ? jvms->map()->argument(jvms, 0)->uncast() : NULL;
twisti@5901 401 for (JVMState* j = jvms->caller(); j != NULL && j->has_method(); j = j->caller()) {
twisti@5901 402 if (j->method() == callee_method) {
twisti@5901 403 if (is_compiled_lambda_form) {
twisti@5901 404 // Since compiled lambda forms are heavily reused, we allow recursive inlining. If it is truly
twisti@5901 405 // a recursion (using the same "receiver"), we limit inlining; otherwise we can easily blow the
twisti@5901 406 // compiler stack.
twisti@5901 407 Node* caller_argument0 = j->map()->argument(j, 0)->uncast();
twisti@5901 408 if (caller_argument0 == callee_argument0) {
twisti@5901 409 inline_level++;
twisti@5901 410 }
twisti@5901 411 } else {
twisti@5901 412 inline_level++;
iignatyev@4660 413 }
twisti@2687 414 }
twisti@5901 415 }
twisti@5901 416 if (inline_level > MaxRecursiveInlineLevel) {
twisti@5901 417 set_msg("recursive inlining is too deep");
twisti@5901 418 return false;
twisti@2687 419 }
twisti@2687 420 }
twisti@2687 421
twisti@3097 422 int size = callee_method->code_size_for_inlining();
duke@435 423
shade@6314 424 if (ClipInlining && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
roland@4409 425 if (!callee_method->force_inline() || !IncrementalInline) {
iignatyev@4660 426 set_msg("size > DesiredMethodLimit");
iignatyev@4660 427 return false;
roland@4409 428 } else if (!C->inlining_incrementally()) {
roland@4409 429 should_delay = true;
roland@4409 430 }
duke@435 431 }
duke@435 432
duke@435 433 // ok, inline this method
iignatyev@4660 434 return true;
duke@435 435 }
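
The recursion check inside try_to_inline() above walks the JVMState caller chain and counts frames already executing the callee. The sketch below models that walk with invented types and ignores the extra receiver comparison used for compiled lambda forms; MaxRecursiveInlineLevel is the real flag name, everything else is hypothetical.

// Standalone sketch only -- ToyFrame and recursion_depth are invented.
#include <cstdio>

struct ToyFrame {
  int method_id;           // stands in for the frame's ciMethod
  const ToyFrame* caller;  // enclosing frame in the inlining chain (NULL at root)
};

// Count how many frames in the chain, starting at the current one, are
// already executing the candidate callee.
int recursion_depth(const ToyFrame* current, int callee_id) {
  int depth = 0;
  for (const ToyFrame* f = current; f != NULL; f = f->caller) {
    if (f->method_id == callee_id) depth++;
  }
  return depth;
}

int main() {
  const int kMaxRecursiveInlineLevel = 1;  // toy stand-in for the -XX flag
  ToyFrame root   = { 1, NULL };
  ToyFrame middle = { 2, &root };          // method 2 inlined once already
  ToyFrame leaf   = { 2, &middle };        // method 2 calling itself again
  bool too_deep = recursion_depth(&leaf, 2) > kMaxRecursiveInlineLevel;
  std::printf("recursive inlining too deep: %d\n", too_deep);  // prints 1
  return 0;
}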
duke@435 436
duke@435 437 //------------------------------pass_initial_checks----------------------------
duke@435 438 bool pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
duke@435 439 ciInstanceKlass *callee_holder = callee_method ? callee_method->holder() : NULL;
duke@435 440 // Check if a callee_method was suggested
duke@435 441 if( callee_method == NULL ) return false;
duke@435 442 // Check if klass of callee_method is loaded
duke@435 443 if( !callee_holder->is_loaded() ) return false;
duke@435 444 if( !callee_holder->is_initialized() ) return false;
duke@435 445 if( !UseInterpreter || CompileTheWorld /* running Xcomp or CTW */ ) {
duke@435 446 // Checks that constant pool's call site has been visited
duke@435 447 // stricter than callee_holder->is_initialized()
duke@435 448 ciBytecodeStream iter(caller_method);
duke@435 449 iter.force_bci(caller_bci);
duke@435 450 Bytecodes::Code call_bc = iter.cur_bc();
twisti@1572 451 // An invokedynamic instruction does not have a klass.
twisti@1572 452 if (call_bc != Bytecodes::_invokedynamic) {
jrose@1920 453 int index = iter.get_index_u2_cpcache();
twisti@1572 454 if (!caller_method->is_klass_loaded(index, true)) {
twisti@1572 455 return false;
twisti@1572 456 }
twisti@1572 457 // Try to do constant pool resolution if running Xcomp
twisti@1572 458 if( !caller_method->check_call(index, call_bc == Bytecodes::_invokestatic) ) {
twisti@1572 459 return false;
twisti@1572 460 }
duke@435 461 }
duke@435 462 }
duke@435 463 // We will attempt to see if a class/field/etc got properly loaded. If it
duke@435 464 // did not, it may attempt to throw an exception during our probing. Catch
duke@435 465 // and ignore such exceptions and do not attempt to compile the method.
duke@435 466 if( callee_method->should_exclude() ) return false;
duke@435 467
duke@435 468 return true;
duke@435 469 }
duke@435 470
twisti@3100 471 //------------------------------check_can_parse--------------------------------
twisti@3100 472 const char* InlineTree::check_can_parse(ciMethod* callee) {
twisti@3100 473 // Certain methods cannot be parsed at all:
twisti@3100 474 if ( callee->is_native()) return "native method";
twisti@3969 475 if ( callee->is_abstract()) return "abstract method";
twisti@3100 476 if (!callee->can_be_compiled()) return "not compilable (disabled)";
twisti@3100 477 if (!callee->has_balanced_monitors()) return "not compilable (unbalanced monitors)";
twisti@3100 478 if ( callee->get_flow_analysis()->failing()) return "not compilable (flow analysis failed)";
twisti@3100 479 return NULL;
twisti@3100 480 }
twisti@3100 481
duke@435 482 //------------------------------print_inlining---------------------------------
vlivanov@4532 483 void InlineTree::print_inlining(ciMethod* callee_method, int caller_bci,
iignatyev@4660 484 bool success) const {
iignatyev@4660 485 const char* inline_msg = msg();
iignatyev@4660 486 assert(inline_msg != NULL, "just checking");
vlivanov@4532 487 if (C->log() != NULL) {
vlivanov@4532 488 if (success) {
iignatyev@4660 489 C->log()->inline_success(inline_msg);
vlivanov@4532 490 } else {
iignatyev@4660 491 C->log()->inline_fail(inline_msg);
vlivanov@4532 492 }
vlivanov@4532 493 }
kvn@5763 494 if (C->print_inlining()) {
iignatyev@4660 495 C->print_inlining(callee_method, inline_level(), caller_bci, inline_msg);
vlivanov@4532 496 if (callee_method == NULL) tty->print(" callee not monotonic or profiled");
vlivanov@4532 497 if (Verbose && callee_method) {
vlivanov@4532 498 const InlineTree *top = this;
vlivanov@4532 499 while( top->caller_tree() != NULL ) { top = top->caller_tree(); }
vlivanov@4532 500 //tty->print(" bcs: %d+%d invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
vlivanov@4532 501 }
duke@435 502 }
duke@435 503 }
duke@435 504
duke@435 505 //------------------------------ok_to_inline-----------------------------------
roland@4409 506 WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci, bool& should_delay) {
duke@435 507 assert(callee_method != NULL, "caller checks for optimized virtual!");
roland@4409 508 assert(!should_delay, "should be initialized to false");
duke@435 509 #ifdef ASSERT
duke@435 510 // Make sure the incoming jvms has the same information content as me.
duke@435 511 // This means that we can eventually make this whole class AllStatic.
duke@435 512 if (jvms->caller() == NULL) {
duke@435 513 assert(_caller_jvms == NULL, "redundant instance state");
duke@435 514 } else {
duke@435 515 assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
duke@435 516 }
duke@435 517 assert(_method == jvms->method(), "redundant instance state");
duke@435 518 #endif
duke@435 519 int caller_bci = jvms->bci();
iignatyev@4660 520 ciMethod* caller_method = jvms->method();
duke@435 521
twisti@3100 522 // Do some initial checks.
twisti@3100 523 if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
iignatyev@4660 524 set_msg("failed initial checks");
iignatyev@4660 525 print_inlining(callee_method, caller_bci, false /* !success */);
duke@435 526 return NULL;
duke@435 527 }
duke@435 528
twisti@3100 529 // Do some parse checks.
iignatyev@4660 530 set_msg(check_can_parse(callee_method));
iignatyev@4660 531 if (msg() != NULL) {
iignatyev@4660 532 print_inlining(callee_method, caller_bci, false /* !success */);
twisti@3100 533 return NULL;
twisti@3100 534 }
twisti@3100 535
duke@435 536 // Check if inlining policy says no.
duke@435 537 WarmCallInfo wci = *(initial_wci);
iignatyev@4660 538 bool success = try_to_inline(callee_method, caller_method, caller_bci,
twisti@5901 539 jvms, profile, &wci, should_delay);
duke@435 540
duke@435 541 #ifndef PRODUCT
shade@6314 542 if (InlineWarmCalls && (PrintOpto || C->print_inlining())) {
duke@435 543 bool cold = wci.is_cold();
duke@435 544 bool hot = !cold && wci.is_hot();
iignatyev@4660 545 bool old_cold = !success;
duke@435 546 if (old_cold != cold || (Verbose || WizardMode)) {
iignatyev@4660 547 if (msg() == NULL) {
iignatyev@4660 548 set_msg("OK");
iignatyev@4660 549 }
duke@435 550 tty->print(" OldInlining= %4s : %s\n WCI=",
iignatyev@4660 551 old_cold ? "cold" : "hot", msg());
duke@435 552 wci.print();
duke@435 553 }
duke@435 554 }
duke@435 555 #endif
shade@6314 556 if (success) {
shade@6314 557 wci = *(WarmCallInfo::always_hot());
shade@6314 558 } else {
shade@6314 559 wci = *(WarmCallInfo::always_cold());
iignatyev@4660 560 }
shade@6314 561
duke@435 562 if (!InlineWarmCalls) {
duke@435 563 if (!wci.is_cold() && !wci.is_hot()) {
duke@435 564 // Do not inline the warm calls.
duke@435 565 wci = *(WarmCallInfo::always_cold());
duke@435 566 }
duke@435 567 }
duke@435 568
duke@435 569 if (!wci.is_cold()) {
duke@435 570 // Inline!
iignatyev@4660 571 if (msg() == NULL) {
iignatyev@4660 572 set_msg("inline (hot)");
iignatyev@4660 573 }
iignatyev@4660 574 print_inlining(callee_method, caller_bci, true /* success */);
shade@6314 575 build_inline_tree_for_callee(callee_method, jvms, caller_bci);
duke@435 576 if (InlineWarmCalls && !wci.is_hot())
duke@435 577 return new (C) WarmCallInfo(wci); // copy to heap
duke@435 578 return WarmCallInfo::always_hot();
duke@435 579 }
duke@435 580
duke@435 581 // Do not inline
iignatyev@4660 582 if (msg() == NULL) {
iignatyev@4660 583 set_msg("too cold to inline");
iignatyev@4660 584 }
iignatyev@4660 585 print_inlining(callee_method, caller_bci, false /* !success */ );
duke@435 586 return NULL;
duke@435 587 }
duke@435 588
duke@435 589 //------------------------------compute_callee_frequency-----------------------
duke@435 590 float InlineTree::compute_callee_frequency( int caller_bci ) const {
duke@435 591 int count = method()->interpreter_call_site_count(caller_bci);
duke@435 592 int invcnt = method()->interpreter_invocation_count();
duke@435 593 float freq = (float)count/(float)invcnt;
duke@435 594 // Call-site count / interpreter invocation count, scaled recursively.
duke@435 595 // Always between 0.0 and 1.0. Represents the percentage of the method's
duke@435 596 // total execution time used at this call site.
duke@435 597
duke@435 598 return freq;
duke@435 599 }
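
A quick worked example of the recursive scaling performed by compute_callee_frequency(), as standalone toy code with invented numbers:

// Toy illustration only -- invented numbers, not HotSpot code.
#include <cstdio>

// Frequency of a call site, scaled by how much of the root method's time
// the caller itself accounts for.
float scaled_frequency(float caller_ratio, int site_count, int invocation_count) {
  return caller_ratio * ((float)site_count / (float)invocation_count);
}

int main() {
  // Root method: the site runs on 50% of the method's invocations.
  float level1 = scaled_frequency(1.0f, 50, 100);
  // One level deeper: that callee's own site runs on 20% of its invocations,
  // so it accounts for roughly 10% of the root method's execution time.
  float level2 = scaled_frequency(level1, 20, 100);
  std::printf("level1=%.2f level2=%.2f\n", level1, level2);  // 0.50 0.10
  return 0;
}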
duke@435 600
duke@435 601 //------------------------------build_inline_tree_for_callee-------------------
duke@435 602 InlineTree *InlineTree::build_inline_tree_for_callee( ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
duke@435 603 float recur_frequency = _site_invoke_ratio * compute_callee_frequency(caller_bci);
duke@435 604 // Attempt inlining.
duke@435 605 InlineTree* old_ilt = callee_at(caller_bci, callee_method);
duke@435 606 if (old_ilt != NULL) {
duke@435 607 return old_ilt;
duke@435 608 }
never@2981 609 int max_inline_level_adjust = 0;
jrose@1592 610 if (caller_jvms->method() != NULL) {
zmajo@7854 611 if (caller_jvms->method()->is_compiled_lambda_form()) {
never@2981 612 max_inline_level_adjust += 1; // don't count actions in MH or indy adapter frames
zmajo@7854 613 } else if (callee_method->is_method_handle_intrinsic() ||
zmajo@7854 614 callee_method->is_compiled_lambda_form()) {
zmajo@7854 615 max_inline_level_adjust += 1; // don't count method handle calls from java.lang.invoke implementation
jrose@1592 616 }
kvn@5763 617 if (max_inline_level_adjust != 0 && C->print_inlining() && (Verbose || WizardMode)) {
never@2981 618 CompileTask::print_inline_indent(inline_level());
twisti@2898 619 tty->print_cr(" \\-> discounting inline depth");
jrose@1592 620 }
never@2981 621 if (max_inline_level_adjust != 0 && C->log()) {
jrose@1592 622 int id1 = C->log()->identify(caller_jvms->method());
jrose@1592 623 int id2 = C->log()->identify(callee_method);
never@2981 624 C->log()->elem("inline_level_discount caller='%d' callee='%d'", id1, id2);
jrose@1592 625 }
jrose@1592 626 }
never@2981 627 InlineTree* ilt = new InlineTree(C, this, callee_method, caller_jvms, caller_bci, recur_frequency, _max_inline_level + max_inline_level_adjust);
never@2981 628 _subtrees.append(ilt);
duke@435 629
duke@435 630 NOT_PRODUCT( _count_inlines += 1; )
duke@435 631
duke@435 632 return ilt;
duke@435 633 }
duke@435 634
duke@435 635
duke@435 636 //---------------------------------------callee_at-----------------------------
duke@435 637 InlineTree *InlineTree::callee_at(int bci, ciMethod* callee) const {
duke@435 638 for (int i = 0; i < _subtrees.length(); i++) {
duke@435 639 InlineTree* sub = _subtrees.at(i);
duke@435 640 if (sub->caller_bci() == bci && callee == sub->method()) {
duke@435 641 return sub;
duke@435 642 }
duke@435 643 }
duke@435 644 return NULL;
duke@435 645 }
duke@435 646
duke@435 647
duke@435 648 //------------------------------build_inline_tree_root-------------------------
duke@435 649 InlineTree *InlineTree::build_inline_tree_root() {
duke@435 650 Compile* C = Compile::current();
duke@435 651
duke@435 652 // Root of inline tree
never@2981 653 InlineTree* ilt = new InlineTree(C, NULL, C->method(), NULL, -1, 1.0F, MaxInlineLevel);
duke@435 654
duke@435 655 return ilt;
duke@435 656 }
duke@435 657
duke@435 658
duke@435 659 //-------------------------find_subtree_from_root-----------------------------
duke@435 660 // Given a jvms, which determines a call chain from the root method,
duke@435 661 // find the corresponding inline tree.
duke@435 662 // Note: This method will be removed or replaced as InlineTree goes away.
twisti@3969 663 InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee) {
duke@435 664 InlineTree* iltp = root;
duke@435 665 uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
duke@435 666 for (uint d = 1; d <= depth; d++) {
duke@435 667 JVMState* jvmsp = jvms->of_depth(d);
duke@435 668 // Select the corresponding subtree for this bci.
duke@435 669 assert(jvmsp->method() == iltp->method(), "tree still in sync");
duke@435 670 ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
duke@435 671 InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
twisti@3969 672 if (sub == NULL) {
twisti@3969 673 if (d == depth) {
twisti@3969 674 sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
duke@435 675 }
twisti@3969 676 guarantee(sub != NULL, "should be a sub-ilt here");
twisti@3969 677 return sub;
duke@435 678 }
duke@435 679 iltp = sub;
duke@435 680 }
duke@435 681 return iltp;
duke@435 682 }
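
The lookup in find_subtree_from_root() descends the inline tree one call frame at a time, matching children by (bci, callee) exactly as callee_at() does. Below is a self-contained toy model of that matching; all types, ids and bcis are invented.

// Toy model only -- not HotSpot code.
#include <cstdio>
#include <vector>

struct ToyInlineTree {
  int caller_bci;                        // bci of the call site in the caller
  int method_id;                         // stands in for the subtree's ciMethod
  std::vector<ToyInlineTree*> subtrees;  // children, one per inlined call site

  // Equivalent of callee_at(): find the child recorded for this site.
  ToyInlineTree* callee_at(int bci, int callee_id) const {
    for (size_t i = 0; i < subtrees.size(); i++) {
      ToyInlineTree* sub = subtrees[i];
      if (sub->caller_bci == bci && sub->method_id == callee_id) return sub;
    }
    return NULL;
  }
};

int main() {
  ToyInlineTree root  = { -1, 1, std::vector<ToyInlineTree*>() };
  ToyInlineTree child = {  7, 2, std::vector<ToyInlineTree*>() };
  root.subtrees.push_back(&child);
  ToyInlineTree* found = root.callee_at(7, 2);
  std::printf("found subtree for bci 7, method 2: %d\n", found != NULL);  // 1
  return 0;
}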
never@3138 683
kvn@6217 684 // Count number of nodes in this subtree
kvn@6217 685 int InlineTree::count() const {
kvn@6217 686 int result = 1;
kvn@6217 687 for (int i = 0 ; i < _subtrees.length(); i++) {
kvn@6217 688 result += _subtrees.at(i)->count();
kvn@6217 689 }
kvn@6217 690 return result;
kvn@6217 691 }
kvn@6217 692
kvn@6217 693 void InlineTree::dump_replay_data(outputStream* out) {
kvn@6217 694 out->print(" %d %d ", inline_level(), caller_bci());
kvn@6217 695 method()->dump_name_as_ascii(out);
kvn@6217 696 for (int i = 0 ; i < _subtrees.length(); i++) {
kvn@6217 697 _subtrees.at(i)->dump_replay_data(out);
kvn@6217 698 }
kvn@6217 699 }
never@3138 700
never@3138 701
never@3138 702 #ifndef PRODUCT
never@3138 703 void InlineTree::print_impl(outputStream* st, int indent) const {
never@3138 704 for (int i = 0; i < indent; i++) st->print(" ");
kvn@6217 705 st->print(" @ %d", caller_bci());
never@3138 706 method()->print_short_name(st);
never@3138 707 st->cr();
never@3138 708
never@3138 709 for (int i = 0 ; i < _subtrees.length(); i++) {
never@3138 710 _subtrees.at(i)->print_impl(st, indent + 2);
never@3138 711 }
never@3138 712 }
never@3138 713
never@3138 714 void InlineTree::print_value_on(outputStream* st) const {
never@3138 715 print_impl(st, 2);
never@3138 716 }
never@3138 717 #endif
