/*
 * Copyright (c) 1998, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciReplay.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compileLog.hpp"
#include "interpreter/linkResolver.hpp"
#include "oops/objArrayKlass.hpp"
#include "opto/callGenerator.hpp"
#include "opto/parse.hpp"
#include "runtime/handles.inline.hpp"

//=============================================================================
//------------------------------InlineTree-------------------------------------
InlineTree::InlineTree(Compile* c,
                       const InlineTree *caller_tree, ciMethod* callee,
                       JVMState* caller_jvms, int caller_bci,
                       float site_invoke_ratio, int max_inline_level) :
  C(c),
  _caller_jvms(caller_jvms),
  _caller_tree((InlineTree*) caller_tree),
  _method(callee),
  _site_invoke_ratio(site_invoke_ratio),
  _max_inline_level(max_inline_level),
  _count_inline_bcs(method()->code_size_for_inlining()),
  _subtrees(c->comp_arena(), 2, 0, NULL),
  _msg(NULL)
{
  NOT_PRODUCT(_count_inlines = 0;)
  if (_caller_jvms != NULL) {
    // Keep a private copy of the caller_jvms:
    _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
    _caller_jvms->set_bci(caller_jvms->bci());
    assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
  }
  assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  assert((caller_tree == NULL ?
          0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  if (UseOldInlining) {
    // Update hierarchical counts, count_inline_bcs() and count_inlines()
    InlineTree *caller = (InlineTree *)caller_tree;
    for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) {
      caller->_count_inline_bcs += count_inline_bcs();
      NOT_PRODUCT(caller->_count_inlines++;)
    }
  }
}

InlineTree::InlineTree(Compile* c, ciMethod* callee_method, JVMState* caller_jvms,
                       float site_invoke_ratio, int max_inline_level) :
  C(c),
  _caller_jvms(caller_jvms),
  _caller_tree(NULL),
  _method(callee_method),
  _site_invoke_ratio(site_invoke_ratio),
  _max_inline_level(max_inline_level),
  _count_inline_bcs(method()->code_size()),
  _msg(NULL)
{
  NOT_PRODUCT(_count_inlines = 0;)
  assert(!UseOldInlining, "do not use for old stuff");
}

/**
 * Return true when EA is ON and a java constructor is called or
 * a super constructor is called from an inlined java constructor.
 * Also return true for boxing methods.
 */
static bool is_init_with_ea(ciMethod* callee_method,
                            ciMethod* caller_method, Compile* C) {
  if (!C->do_escape_analysis() || !EliminateAllocations) {
    return false; // EA is off
  }
  if (callee_method->is_initializer()) {
    return true; // constructor
  }
  if (caller_method->is_initializer() &&
      caller_method != C->method() &&
      caller_method->holder()->is_subclass_of(callee_method->holder())) {
    return true; // super constructor is called from inlined constructor
  }
  if (C->eliminate_boxing() && callee_method->is_boxing_method()) {
    return true;
  }
  return false;
}

/**
 * Force inlining of unboxing accessors.
 */
static bool is_unboxing_method(ciMethod* callee_method, Compile* C) {
  return C->eliminate_boxing() && callee_method->is_unboxing_method();
}

// positive filter: should callee be inlined?
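// Applies the "positive" heuristics only: CompilerOracle force-inline hints,
// throw-heavy methods, and the frequency-scaled size limits. A true result
// here can still be vetoed by should_not_inline() below.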
bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, ciCallProfile& profile,
                               WarmCallInfo* wci_result) {
  // Allows targeted inlining
  if (callee_method->should_inline()) {
    *wci_result = *(WarmCallInfo::always_hot());
    if (C->print_inlining() && Verbose) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined method is hot: ");
    }
    set_msg("force inline by CompilerOracle");
    return true;
  }

  int size = callee_method->code_size_for_inlining();

  // Check for too many throws (and not too huge)
  if (callee_method->interpreter_throwout_count() > InlineThrowCount &&
      size < InlineThrowMaxSize) {
    wci_result->set_profit(wci_result->profit() * 100);
    if (C->print_inlining() && Verbose) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
    }
    set_msg("many throws");
    return true;
  }

  if (!UseOldInlining) {
    set_msg("!UseOldInlining");
    return true; // size and frequency are represented in a new way
  }

  int default_max_inline_size = C->max_inline_size();
  int inline_small_code_size = InlineSmallCode / 4;
  int max_inline_size = default_max_inline_size;

  int call_site_count = method()->scale_count(profile.count());
  int invoke_count = method()->interpreter_invocation_count();

  assert(invoke_count != 0, "require invocation count greater than zero");
  int freq = call_site_count / invoke_count;

  // bump the max size if the call is frequent
  if ((freq >= InlineFrequencyRatio) ||
      (call_site_count >= InlineFrequencyCount) ||
      is_unboxing_method(callee_method, C) ||
      is_init_with_ea(callee_method, caller_method, C)) {

    max_inline_size = C->freq_inline_size();
    if (size <= max_inline_size && TraceFrequencyInlining) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined frequent method (freq=%d count=%d):", freq, call_site_count);
      CompileTask::print_inline_indent(inline_level());
      callee_method->print();
      tty->cr();
    }
  } else {
    // Not hot. Check for medium-sized pre-existing nmethod at cold sites.
    if (callee_method->has_compiled_code() &&
        callee_method->instructions_size() > inline_small_code_size) {
      set_msg("already compiled into a medium method");
      return false;
    }
  }
  if (size > max_inline_size) {
    if (max_inline_size > default_max_inline_size) {
      set_msg("hot method too big");
    } else {
      set_msg("too big");
    }
    return false;
  }
  return true;
}


// negative filter: should callee NOT be inlined?
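// Returns true when inlining must be rejected: correctness restrictions
// (abstract or native callee, uninitialized holder, don't-inline annotation),
// CompilerOracle and ciReplay exclusions, and profile-based objections for
// non-trivial methods (oversized compiled code, never-executed call sites).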
bool InlineTree::should_not_inline(ciMethod *callee_method,
                                   ciMethod* caller_method,
                                   WarmCallInfo* wci_result) {

  const char* fail_msg = NULL;

  // First check all inlining restrictions which are required for correctness
  if ( callee_method->is_abstract()) {
    fail_msg = "abstract method"; // note: we allow ik->is_abstract()
  } else if (!callee_method->holder()->is_initialized()) {
    fail_msg = "method holder not initialized";
  } else if ( callee_method->is_native()) {
    fail_msg = "native method";
  } else if ( callee_method->dont_inline()) {
    fail_msg = "don't inline by annotation";
  }

  if (!UseOldInlining) {
    if (fail_msg != NULL) {
      *wci_result = *(WarmCallInfo::always_cold());
      set_msg(fail_msg);
      return true;
    }

    if (callee_method->has_unloaded_classes_in_signature()) {
      wci_result->set_profit(wci_result->profit() * 0.1);
    }

    // don't inline exception code unless the top method belongs to an
    // exception class
    if (callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
      ciMethod* top_method = caller_jvms() ? caller_jvms()->of_depth(1)->method() : method();
      if (!top_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
        wci_result->set_profit(wci_result->profit() * 0.1);
      }
    }

    if (callee_method->has_compiled_code() &&
        callee_method->instructions_size() > InlineSmallCode) {
      wci_result->set_profit(wci_result->profit() * 0.1);
      // %%% adjust wci_result->size()?
    }

    return false;
  }

  // one more inlining restriction
  if (fail_msg == NULL && callee_method->has_unloaded_classes_in_signature()) {
    fail_msg = "unloaded signature classes";
  }

  if (fail_msg != NULL) {
    set_msg(fail_msg);
    return true;
  }

  // ignore heuristic controls on inlining
  if (callee_method->should_inline()) {
    set_msg("force inline by CompilerOracle");
    return false;
  }

  if (callee_method->should_not_inline()) {
    set_msg("disallowed by CompilerOracle");
    return true;
  }

#ifndef PRODUCT
  if (ciReplay::should_not_inline(callee_method)) {
    set_msg("disallowed by ciReplay");
    return true;
  }
#endif

  // Now perform checks which are heuristic

  if (is_unboxing_method(callee_method, C)) {
    // Inline unboxing methods.
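    // (only reached when boxing elimination is enabled; see is_unboxing_method() above)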
    return false;
  }

  if (!callee_method->force_inline()) {
    if (callee_method->has_compiled_code() &&
        callee_method->instructions_size() > InlineSmallCode) {
      set_msg("already compiled into a big method");
      return true;
    }
  }

  // don't inline exception code unless the top method belongs to an
  // exception class
  if (caller_tree() != NULL &&
      callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
    const InlineTree *top = this;
    while (top->caller_tree() != NULL) top = top->caller_tree();
    ciInstanceKlass* k = top->method()->holder();
    if (!k->is_subclass_of(C->env()->Throwable_klass())) {
      set_msg("exception method");
      return true;
    }
  }

  // use frequency-based objections only for non-trivial methods
  if (callee_method->code_size() <= MaxTrivialSize) {
    return false;
  }

  // don't use counts with -Xcomp or CTW
  if (UseInterpreter && !CompileTheWorld) {

    if (!callee_method->has_compiled_code() &&
        !callee_method->was_executed_more_than(0)) {
      set_msg("never executed");
      return true;
    }

    if (is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis: inline all executed constructors
      return false;
    } else if (!callee_method->was_executed_more_than(MIN2(MinInliningThreshold,
                                                           CompileThreshold >> 1))) {
      set_msg("executed < MinInliningThreshold times");
      return true;
    }
  }

  return false;
}

//-----------------------------try_to_inline-----------------------------------
// return true if ok
// Relocated from "InliningClosure::try_to_inline"
bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, ciCallProfile& profile,
                               WarmCallInfo* wci_result, bool& should_delay) {

  // Old algorithm had funny accumulating BC-size counters
  if (UseOldInlining && ClipInlining
      && (int)count_inline_bcs() >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  if (!should_inline(callee_method, caller_method, caller_bci, profile,
                     wci_result)) {
    return false;
  }
  if (should_not_inline(callee_method, caller_method, wci_result)) {
    return false;
  }

  if (InlineAccessors && callee_method->is_accessor()) {
    // accessor methods are not subject to any of the following limits.
    set_msg("accessor");
    return true;
  }

  // suppress a few checks for accessors and trivial methods
  if (callee_method->code_size() > MaxTrivialSize) {

    // don't inline into giant methods
    if (C->over_inlining_cutoff()) {
      if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
          || !IncrementalInline) {
        set_msg("NodeCountInliningCutoff");
        return false;
      } else {
        should_delay = true;
      }
    }

    if ((!UseInterpreter || CompileTheWorld) &&
        is_init_with_ea(callee_method, caller_method, C)) {

      // Escape Analysis stress testing when running Xcomp or CTW:
      // inline constructors even if they are not reached.

    } else if (profile.count() == 0) {
      // don't inline unreached call sites
      set_msg("call site not reached");
      return false;
    }
  }

  if (!C->do_inlining() && InlineAccessors) {
    set_msg("not an accessor");
    return false;
  }
  if (inline_level() > _max_inline_level) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("inlining too deep");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // detect direct and indirect recursive inlining
  if (!callee_method->is_compiled_lambda_form()) {
    // count the current method and the callee
    int inline_level = (method() == callee_method) ? 1 : 0;
    if (inline_level > MaxRecursiveInlineLevel) {
      set_msg("recursively inlining too deep");
      return false;
    }
    // count callers of current method and callee
    JVMState* jvms = caller_jvms();
    while (jvms != NULL && jvms->has_method()) {
      if (jvms->method() == callee_method) {
        inline_level++;
        if (inline_level > MaxRecursiveInlineLevel) {
          set_msg("recursively inlining too deep");
          return false;
        }
      }
      jvms = jvms->caller();
    }
  }

  int size = callee_method->code_size_for_inlining();

  if (UseOldInlining && ClipInlining
      && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // ok, inline this method
  return true;
}

//------------------------------pass_initial_checks----------------------------
bool pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
  ciInstanceKlass *callee_holder = callee_method ?
                                   callee_method->holder() : NULL;
  // Check if a callee_method was suggested
  if( callee_method == NULL ) return false;
  // Check if klass of callee_method is loaded
  if( !callee_holder->is_loaded() ) return false;
  if( !callee_holder->is_initialized() ) return false;
  if( !UseInterpreter || CompileTheWorld /* running Xcomp or CTW */ ) {
    // Checks that constant pool's call site has been visited
    // stricter than callee_holder->is_initialized()
    ciBytecodeStream iter(caller_method);
    iter.force_bci(caller_bci);
    Bytecodes::Code call_bc = iter.cur_bc();
    // An invokedynamic instruction does not have a klass.
    if (call_bc != Bytecodes::_invokedynamic) {
      int index = iter.get_index_u2_cpcache();
      if (!caller_method->is_klass_loaded(index, true)) {
        return false;
      }
      // Try to do constant pool resolution if running Xcomp
      if( !caller_method->check_call(index, call_bc == Bytecodes::_invokestatic) ) {
        return false;
      }
    }
  }
  // We will attempt to see if a class/field/etc got properly loaded. If it
  // did not, it may attempt to throw an exception during our probing. Catch
  // and ignore such exceptions and do not attempt to compile the method.
  if( callee_method->should_exclude() ) return false;

  return true;
}

//------------------------------check_can_parse--------------------------------
const char* InlineTree::check_can_parse(ciMethod* callee) {
  // Certain methods cannot be parsed at all:
  if ( callee->is_native()) return "native method";
  if ( callee->is_abstract()) return "abstract method";
  if (!callee->can_be_compiled()) return "not compilable (disabled)";
  if (!callee->has_balanced_monitors()) return "not compilable (unbalanced monitors)";
  if ( callee->get_flow_analysis()->failing()) return "not compilable (flow analysis failed)";
  return NULL;
}

//------------------------------print_inlining---------------------------------
void InlineTree::print_inlining(ciMethod* callee_method, int caller_bci,
                                bool success) const {
  const char* inline_msg = msg();
  assert(inline_msg != NULL, "just checking");
  if (C->log() != NULL) {
    if (success) {
      C->log()->inline_success(inline_msg);
    } else {
      C->log()->inline_fail(inline_msg);
    }
  }
  if (C->print_inlining()) {
    C->print_inlining(callee_method, inline_level(), caller_bci, inline_msg);
    if (callee_method == NULL) tty->print(" callee not monotonic or profiled");
    if (Verbose && callee_method) {
      const InlineTree *top = this;
      while( top->caller_tree() != NULL ) { top = top->caller_tree(); }
      //tty->print(" bcs: %d+%d invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
    }
  }
}

//------------------------------ok_to_inline-----------------------------------
WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method,
                                       JVMState* jvms, ciCallProfile& profile,
                                       WarmCallInfo* initial_wci, bool& should_delay) {
  assert(callee_method != NULL, "caller checks for optimized virtual!");
  assert(!should_delay, "should be initialized to false");
#ifdef ASSERT
  // Make sure the incoming jvms has the same information content as me.
  // This means that we can eventually make this whole class AllStatic.
  if (jvms->caller() == NULL) {
    assert(_caller_jvms == NULL, "redundant instance state");
  } else {
    assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
  }
  assert(_method == jvms->method(), "redundant instance state");
#endif
  int caller_bci = jvms->bci();
  ciMethod* caller_method = jvms->method();

  // Do some initial checks.
  if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
    set_msg("failed initial checks");
    print_inlining(callee_method, caller_bci, false /* !success */);
    return NULL;
  }

  // Do some parse checks.
  set_msg(check_can_parse(callee_method));
  if (msg() != NULL) {
    print_inlining(callee_method, caller_bci, false /* !success */);
    return NULL;
  }

  // Check if inlining policy says no.
  WarmCallInfo wci = *(initial_wci);
  bool success = try_to_inline(callee_method, caller_method, caller_bci,
                               profile, &wci, should_delay);

#ifndef PRODUCT
  if (UseOldInlining && InlineWarmCalls
      && (PrintOpto || C->print_inlining())) {
    bool cold = wci.is_cold();
    bool hot = !cold && wci.is_hot();
    bool old_cold = !success;
    if (old_cold != cold || (Verbose || WizardMode)) {
      if (msg() == NULL) {
        set_msg("OK");
      }
      tty->print(" OldInlining= %4s : %s\n WCI=",
                 old_cold ? "cold" : "hot", msg());
      wci.print();
    }
  }
#endif
  if (UseOldInlining) {
    if (success) {
      wci = *(WarmCallInfo::always_hot());
    } else {
      wci = *(WarmCallInfo::always_cold());
    }
  }
  if (!InlineWarmCalls) {
    if (!wci.is_cold() && !wci.is_hot()) {
      // Do not inline the warm calls.
      wci = *(WarmCallInfo::always_cold());
    }
  }

  if (!wci.is_cold()) {
    // Inline!
    if (msg() == NULL) {
      set_msg("inline (hot)");
    }
    print_inlining(callee_method, caller_bci, true /* success */);
    if (UseOldInlining)
      build_inline_tree_for_callee(callee_method, jvms, caller_bci);
    if (InlineWarmCalls && !wci.is_hot())
      return new (C) WarmCallInfo(wci); // copy to heap
    return WarmCallInfo::always_hot();
  }

  // Do not inline
  if (msg() == NULL) {
    set_msg("too cold to inline");
  }
  print_inlining(callee_method, caller_bci, false /* !success */ );
  return NULL;
}

//------------------------------compute_callee_frequency-----------------------
float InlineTree::compute_callee_frequency( int caller_bci ) const {
  int count = method()->interpreter_call_site_count(caller_bci);
  int invcnt = method()->interpreter_invocation_count();
  float freq = (float)count/(float)invcnt;
  // Call-site count / interpreter invocation count, scaled recursively.
  // Always between 0.0 and 1.0. Represents the percentage of the method's
  // total execution time used at this call site.

  return freq;
}

//------------------------------build_inline_tree_for_callee-------------------
InlineTree *InlineTree::build_inline_tree_for_callee( ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
  float recur_frequency = _site_invoke_ratio * compute_callee_frequency(caller_bci);
  // Attempt inlining.
  InlineTree* old_ilt = callee_at(caller_bci, callee_method);
  if (old_ilt != NULL) {
    return old_ilt;
  }
  int max_inline_level_adjust = 0;
  if (caller_jvms->method() != NULL) {
    if (caller_jvms->method()->is_compiled_lambda_form())
      max_inline_level_adjust += 1;  // don't count actions in MH or indy adapter frames
    else if (callee_method->is_method_handle_intrinsic() ||
             callee_method->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1;  // don't count method handle calls from java.lang.invoke implementation
    }
    if (max_inline_level_adjust != 0 && C->print_inlining() && (Verbose || WizardMode)) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr(" \\-> discounting inline depth");
    }
    if (max_inline_level_adjust != 0 && C->log()) {
      int id1 = C->log()->identify(caller_jvms->method());
      int id2 = C->log()->identify(callee_method);
      C->log()->elem("inline_level_discount caller='%d' callee='%d'", id1, id2);
    }
  }
  InlineTree* ilt = new InlineTree(C, this, callee_method, caller_jvms, caller_bci, recur_frequency, _max_inline_level + max_inline_level_adjust);
  _subtrees.append(ilt);

  NOT_PRODUCT( _count_inlines += 1; )

  return ilt;
}


//---------------------------------------callee_at-----------------------------
InlineTree *InlineTree::callee_at(int bci, ciMethod* callee) const {
  for (int i = 0; i < _subtrees.length(); i++) {
    InlineTree* sub = _subtrees.at(i);
    if (sub->caller_bci() == bci && callee == sub->method()) {
      return sub;
    }
  }
  return NULL;
}


//------------------------------build_inline_tree_root-------------------------
InlineTree *InlineTree::build_inline_tree_root() {
  Compile* C = Compile::current();

  // Root of inline tree
  InlineTree* ilt = new InlineTree(C, NULL, C->method(), NULL, -1, 1.0F, MaxInlineLevel);

  return ilt;
}


//-------------------------find_subtree_from_root-----------------------------
// Given a jvms, which determines a call chain from the root method,
// find the corresponding inline tree.
// Note: This method will be removed or replaced as InlineTree goes away.
InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee) {
  InlineTree* iltp = root;
  uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
  for (uint d = 1; d <= depth; d++) {
    JVMState* jvmsp = jvms->of_depth(d);
    // Select the corresponding subtree for this bci.
    assert(jvmsp->method() == iltp->method(), "tree still in sync");
    ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
    InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
    if (sub == NULL) {
      if (d == depth) {
        sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
      }
      guarantee(sub != NULL, "should be a sub-ilt here");
      return sub;
    }
    iltp = sub;
  }
  return iltp;
}


#ifndef PRODUCT
void InlineTree::print_impl(outputStream* st, int indent) const {
  for (int i = 0; i < indent; i++) st->print(" ");
  st->print(" @ %d ", caller_bci());
  method()->print_short_name(st);
  st->cr();

  for (int i = 0 ; i < _subtrees.length(); i++) {
    _subtrees.at(i)->print_impl(st, indent + 2);
  }
}

void InlineTree::print_value_on(outputStream* st) const {
  print_impl(st, 2);
}
#endif