Fri, 25 May 2012 07:53:11 -0700
7170463: C2 should recognize "obj.getClass() == A.class" code pattern
Summary: Optimize the code pattern obj.getClass() == A.class.
Reviewed-by: jrose, kvn
Contributed-by: Krystal Mok <sajia@taobao.com>
1 /*
2 * Copyright (c) 1998, 2011, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
25 #include "precompiled.hpp"
26 #include "classfile/systemDictionary.hpp"
27 #include "classfile/vmSymbols.hpp"
28 #include "compiler/compileBroker.hpp"
29 #include "compiler/compileLog.hpp"
30 #include "interpreter/linkResolver.hpp"
31 #include "oops/objArrayKlass.hpp"
32 #include "opto/callGenerator.hpp"
33 #include "opto/parse.hpp"
34 #include "runtime/handles.inline.hpp"
36 //=============================================================================
37 //------------------------------InlineTree-------------------------------------
// Construct a child node of the inline tree (old-inlining path).
// Links this node under caller_tree, takes a private snapshot of the
// caller's JVM state, seeds the bytecode-size accumulator with the
// callee's inlining size, and propagates that size up to all ancestors.
InlineTree::InlineTree(Compile* c,
                       const InlineTree *caller_tree, ciMethod* callee,
                       JVMState* caller_jvms, int caller_bci,
                       float site_invoke_ratio, int max_inline_level) :
  C(c),
  _caller_jvms(caller_jvms),
  _caller_tree((InlineTree*) caller_tree),
  _method(callee),
  _site_invoke_ratio(site_invoke_ratio),
  _max_inline_level(max_inline_level),
  _count_inline_bcs(method()->code_size_for_inlining())
{
  NOT_PRODUCT(_count_inlines = 0;)
  if (_caller_jvms != NULL) {
    // Keep a private copy of the caller_jvms:
    _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
    _caller_jvms->set_bci(caller_jvms->bci());
    assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
  }
  // NOTE(review): when caller_jvms is NULL (root node) this assert invokes
  // same_calls_as() through a NULL receiver — presumably tolerated in
  // practice, but worth confirming.
  assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  assert((caller_tree == NULL ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  if (UseOldInlining) {
    // Update hierarchical counts, count_inline_bcs() and count_inlines(),
    // for every ancestor so each caller knows the total bytecode size
    // inlined beneath it.
    InlineTree *caller = (InlineTree *)caller_tree;
    for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) {
      caller->_count_inline_bcs += count_inline_bcs();
      NOT_PRODUCT(caller->_count_inlines++;)
    }
  }
}
// Construct a stand-alone InlineTree node for the new (non-UseOldInlining)
// inlining path: no caller tree is linked and the caller JVMS is kept as-is.
// Note that the size accumulator is seeded with code_size() here, not
// code_size_for_inlining() as in the old-path constructor.
InlineTree::InlineTree(Compile* c, ciMethod* callee_method, JVMState* caller_jvms,
                       float site_invoke_ratio, int max_inline_level) :
  C(c),
  _caller_jvms(caller_jvms),
  _caller_tree(NULL),
  _method(callee_method),
  _site_invoke_ratio(site_invoke_ratio),
  _max_inline_level(max_inline_level),
  _count_inline_bcs(method()->code_size())
{
  NOT_PRODUCT(_count_inlines = 0;)
  assert(!UseOldInlining, "do not use for old stuff");
}
84 static bool is_init_with_ea(ciMethod* callee_method,
85 ciMethod* caller_method, Compile* C) {
86 // True when EA is ON and a java constructor is called or
87 // a super constructor is called from an inlined java constructor.
88 return C->do_escape_analysis() && EliminateAllocations &&
89 ( callee_method->is_initializer() ||
90 (caller_method->is_initializer() &&
91 caller_method != C->method() &&
92 caller_method->holder()->is_subclass_of(callee_method->holder()))
93 );
94 }
// positive filter: should send be inlined?  returns NULL, if yes, or rejection msg
// Heuristic order: CompilerOracle force-inline, throw-heavy methods,
// then (old inlining only) frequency-scaled size limits.  May raise the
// profit in *wci_result for throw-heavy callees.
const char* InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile, WarmCallInfo* wci_result) const {
  // Allows targeted inlining
  if(callee_method->should_inline()) {
    *wci_result = *(WarmCallInfo::always_hot());
    if (PrintInlining && Verbose) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined method is hot: ");
    }
    return NULL;
  }

  // positive filter: should send be inlined?  returns NULL (--> yes)
  // or rejection msg
  int size = callee_method->code_size_for_inlining();

  // Check for too many throws (and not too huge)
  if(callee_method->interpreter_throwout_count() > InlineThrowCount &&
     size < InlineThrowMaxSize ) {
    wci_result->set_profit(wci_result->profit() * 100);
    if (PrintInlining && Verbose) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
    }
    return NULL;
  }

  if (!UseOldInlining) {
    return NULL;  // size and frequency are represented in a new way
  }

  int default_max_inline_size = C->max_inline_size();
  int inline_small_code_size  = InlineSmallCode / 4;
  int max_inline_size         = default_max_inline_size;

  int call_site_count  = method()->scale_count(profile.count());
  int invoke_count     = method()->interpreter_invocation_count();

  // Bytecoded method handle adapters do not have interpreter
  // profiling data but only made up MDO data.  Get the counter from
  // there.
  if (caller_method->is_method_handle_adapter()) {
    assert(method()->method_data_or_null(), "must have an MDO");
    ciMethodData* mdo = method()->method_data();
    ciProfileData* mha_profile = mdo->bci_to_data(caller_bci);
    assert(mha_profile, "must exist");
    CounterData* cd = mha_profile->as_CounterData();
    invoke_count = cd->count();
    if (invoke_count == 0) {
      return "method handle not reached";
    }

    // Prefer the caller's MDO counter for the call-site count when it is
    // available and non-empty; otherwise fall back to the invoke count.
    if (_caller_jvms != NULL && _caller_jvms->method() != NULL &&
        _caller_jvms->method()->method_data() != NULL &&
        !_caller_jvms->method()->method_data()->is_empty()) {
      ciMethodData* mdo = _caller_jvms->method()->method_data();
      ciProfileData* mha_profile = mdo->bci_to_data(_caller_jvms->bci());
      assert(mha_profile, "must exist");
      CounterData* cd = mha_profile->as_CounterData();
      call_site_count = cd->count();
    } else {
      call_site_count = invoke_count; // use the same value
    }
  }

  assert(invoke_count != 0, "require invocation count greater than zero");
  // Integer ratio: how many times this site executes per method invocation.
  int freq = call_site_count / invoke_count;

  // bump the max size if the call is frequent
  if ((freq >= InlineFrequencyRatio) ||
      (call_site_count >= InlineFrequencyCount) ||
      is_init_with_ea(callee_method, caller_method, C)) {

    max_inline_size = C->freq_inline_size();
    if (size <= max_inline_size && TraceFrequencyInlining) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined frequent method (freq=%d count=%d):", freq, call_site_count);
      CompileTask::print_inline_indent(inline_level());
      callee_method->print();
      tty->cr();
    }
  } else {
    // Not hot.  Check for medium-sized pre-existing nmethod at cold sites.
    if (callee_method->has_compiled_code() &&
        callee_method->instructions_size(CompLevel_full_optimization) > inline_small_code_size)
      return "already compiled into a medium method";
  }
  if (size > max_inline_size) {
    if (max_inline_size > default_max_inline_size)
      return "hot method too big";
    return "too big";
  }
  return NULL;
}
// negative filter: should send NOT be inlined?  returns NULL, ok to inline, or rejection msg
// The new-inlining path (!UseOldInlining) expresses soft objections by
// scaling down the profit in *wci_result rather than rejecting outright;
// the old path returns a rejection string directly.
const char* InlineTree::should_not_inline(ciMethod *callee_method, ciMethod* caller_method, WarmCallInfo* wci_result) const {
  // negative filter: should send NOT be inlined?  returns NULL (--> inline) or rejection msg
  if (!UseOldInlining) {
    const char* fail = NULL;
    if (callee_method->is_abstract()) fail = "abstract method";
    // note: we allow ik->is_abstract()
    if (!callee_method->holder()->is_initialized()) fail = "method holder not initialized";
    if (callee_method->is_native()) fail = "native method";

    if (fail) {
      // Hard failure: mark the call site permanently cold.
      *wci_result = *(WarmCallInfo::always_cold());
      return fail;
    }

    if (callee_method->has_unloaded_classes_in_signature()) {
      wci_result->set_profit(wci_result->profit() * 0.1);
    }

    // don't inline exception code unless the top method belongs to an
    // exception class
    if (callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
      ciMethod* top_method = caller_jvms() ? caller_jvms()->of_depth(1)->method() : method();
      if (!top_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
        wci_result->set_profit(wci_result->profit() * 0.1);
      }
    }

    // Large pre-existing compiled code discourages (but does not forbid) inlining.
    if (callee_method->has_compiled_code() && callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode) {
      wci_result->set_profit(wci_result->profit() * 0.1);
      // %%% adjust wci_result->size()?
    }

    return NULL;
  }

  // Always inline MethodHandle methods and generated MethodHandle adapters.
  if (callee_method->is_method_handle_invoke() || callee_method->is_method_handle_adapter())
    return NULL;

  // First check all inlining restrictions which are required for correctness
  if (callee_method->is_abstract()) return "abstract method";
  // note: we allow ik->is_abstract()
  if (!callee_method->holder()->is_initialized()) return "method holder not initialized";
  if (callee_method->is_native()) return "native method";
  if (callee_method->has_unloaded_classes_in_signature()) return "unloaded signature classes";

  if (callee_method->should_inline()) {
    // ignore heuristic controls on inlining
    return NULL;
  }

  // Now perform checks which are heuristic

  if( callee_method->has_compiled_code() && callee_method->instructions_size(CompLevel_full_optimization) > InlineSmallCode )
    return "already compiled into a big method";

  // don't inline exception code unless the top method belongs to an
  // exception class
  if (caller_tree() != NULL &&
      callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
    const InlineTree *top = this;
    while (top->caller_tree() != NULL) top = top->caller_tree();
    ciInstanceKlass* k = top->method()->holder();
    if (!k->is_subclass_of(C->env()->Throwable_klass()))
      return "exception method";
  }

  if (callee_method->should_not_inline()) {
    return "disallowed by CompilerOracle";
  }

  if (UseStringCache) {
    // Do not inline StringCache::profile() method used only at the beginning.
    if (callee_method->name() == ciSymbol::profile_name() &&
        callee_method->holder()->name() == ciSymbol::java_lang_StringCache()) {
      return "profiling method";
    }
  }

  // use frequency-based objections only for non-trivial methods
  if (callee_method->code_size_for_inlining() <= MaxTrivialSize) return NULL;

  // don't use counts with -Xcomp or CTW
  if (UseInterpreter && !CompileTheWorld) {

    if (!callee_method->has_compiled_code() &&
        !callee_method->was_executed_more_than(0)) {
      return "never executed";
    }

    if (is_init_with_ea(callee_method, caller_method, C)) {

      // Escape Analysis: inline all executed constructors

    } else if (!callee_method->was_executed_more_than(MIN2(MinInliningThreshold,
                                                           CompileThreshold >> 1))) {
      return "executed < MinInliningThreshold times";
    }
  }

  return NULL;
}
//-----------------------------try_to_inline-----------------------------------
// return NULL if ok, reason for not inlining otherwise
// Relocated from "InliningClosure::try_to_inline"
// Combines the positive (should_inline) and negative (should_not_inline)
// filters with the global size budget, node-count cutoff, inline-depth
// limit, and direct/indirect recursion detection.  May update *wci_result.
const char* InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile, WarmCallInfo* wci_result) {

  // Old algorithm had funny accumulating BC-size counters
  if (UseOldInlining && ClipInlining
      && (int)count_inline_bcs() >= DesiredMethodLimit) {
    return "size > DesiredMethodLimit";
  }

  const char *msg = NULL;
  msg = should_inline(callee_method, caller_method, caller_bci, profile, wci_result);
  if (msg != NULL)
    return msg;

  msg = should_not_inline(callee_method, caller_method, wci_result);
  if (msg != NULL)
    return msg;

  if (InlineAccessors && callee_method->is_accessor()) {
    // accessor methods are not subject to any of the following limits.
    return NULL;
  }

  // suppress a few checks for accessors and trivial methods
  if (callee_method->code_size_for_inlining() > MaxTrivialSize) {

    // don't inline into giant methods
    if (C->unique() > (uint)NodeCountInliningCutoff) {
      return "NodeCountInliningCutoff";
    }

    if ((!UseInterpreter || CompileTheWorld) &&
        is_init_with_ea(callee_method, caller_method, C)) {

      // Escape Analysis stress testing when running Xcomp or CTW:
      // inline constructors even if they are not reached.

    } else if (profile.count() == 0) {
      // don't inline unreached call sites
      return "call site not reached";
    }
  }

  if (!C->do_inlining() && InlineAccessors) {
    return "not an accessor";
  }
  if (inline_level() > _max_inline_level) {
    return "inlining too deep";
  }

  // detect direct and indirect recursive inlining
  {
    // count the current method and the callee
    int inline_level = (method() == callee_method) ? 1 : 0;
    if (inline_level > MaxRecursiveInlineLevel)
      return "recursively inlining too deep";
    // count callers of current method and callee by walking the
    // caller JVMS chain back toward the root
    JVMState* jvms = caller_jvms();
    while (jvms != NULL && jvms->has_method()) {
      if (jvms->method() == callee_method) {
        inline_level++;
        if (inline_level > MaxRecursiveInlineLevel)
          return "recursively inlining too deep";
      }
      jvms = jvms->caller();
    }
  }

  int size = callee_method->code_size_for_inlining();

  // Re-check the accumulated budget including this callee's size.
  if (UseOldInlining && ClipInlining
      && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
    return "size > DesiredMethodLimit";
  }

  // ok, inline this method
  return NULL;
}
377 //------------------------------pass_initial_checks----------------------------
378 bool pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
379 ciInstanceKlass *callee_holder = callee_method ? callee_method->holder() : NULL;
380 // Check if a callee_method was suggested
381 if( callee_method == NULL ) return false;
382 // Check if klass of callee_method is loaded
383 if( !callee_holder->is_loaded() ) return false;
384 if( !callee_holder->is_initialized() ) return false;
385 if( !UseInterpreter || CompileTheWorld /* running Xcomp or CTW */ ) {
386 // Checks that constant pool's call site has been visited
387 // stricter than callee_holder->is_initialized()
388 ciBytecodeStream iter(caller_method);
389 iter.force_bci(caller_bci);
390 Bytecodes::Code call_bc = iter.cur_bc();
391 // An invokedynamic instruction does not have a klass.
392 if (call_bc != Bytecodes::_invokedynamic) {
393 int index = iter.get_index_u2_cpcache();
394 if (!caller_method->is_klass_loaded(index, true)) {
395 return false;
396 }
397 // Try to do constant pool resolution if running Xcomp
398 if( !caller_method->check_call(index, call_bc == Bytecodes::_invokestatic) ) {
399 return false;
400 }
401 }
402 }
403 // We will attempt to see if a class/field/etc got properly loaded. If it
404 // did not, it may attempt to throw an exception during our probing. Catch
405 // and ignore such exceptions and do not attempt to compile the method.
406 if( callee_method->should_exclude() ) return false;
408 return true;
409 }
411 //------------------------------check_can_parse--------------------------------
412 const char* InlineTree::check_can_parse(ciMethod* callee) {
413 // Certain methods cannot be parsed at all:
414 if ( callee->is_native()) return "native method";
415 if (!callee->can_be_compiled()) return "not compilable (disabled)";
416 if (!callee->has_balanced_monitors()) return "not compilable (unbalanced monitors)";
417 if ( callee->get_flow_analysis()->failing()) return "not compilable (flow analysis failed)";
418 return NULL;
419 }
421 //------------------------------print_inlining---------------------------------
422 // Really, the failure_msg can be a success message also.
423 void InlineTree::print_inlining(ciMethod* callee_method, int caller_bci, const char* failure_msg) const {
424 CompileTask::print_inlining(callee_method, inline_level(), caller_bci, failure_msg ? failure_msg : "inline");
425 if (callee_method == NULL) tty->print(" callee not monotonic or profiled");
426 if (Verbose && callee_method) {
427 const InlineTree *top = this;
428 while( top->caller_tree() != NULL ) { top = top->caller_tree(); }
429 tty->print(" bcs: %d+%d invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
430 }
431 }
//------------------------------ok_to_inline-----------------------------------
// Top-level inlining decision for one call site.  Returns a WarmCallInfo
// when the site should be inlined (always_hot, or a heap-allocated copy for
// warm calls), or NULL when it should not.  On the old inlining path a
// positive decision also records the callee in the inline tree.
WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci) {
  assert(callee_method != NULL, "caller checks for optimized virtual!");
#ifdef ASSERT
  // Make sure the incoming jvms has the same information content as me.
  // This means that we can eventually make this whole class AllStatic.
  if (jvms->caller() == NULL) {
    assert(_caller_jvms == NULL, "redundant instance state");
  } else {
    assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
  }
  assert(_method == jvms->method(), "redundant instance state");
#endif
  const char *failure_msg = NULL;
  int caller_bci = jvms->bci();
  ciMethod *caller_method = jvms->method();

  // Do some initial checks.
  if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
    if (PrintInlining) {
      failure_msg = "failed_initial_checks";
      print_inlining(callee_method, caller_bci, failure_msg);
    }
    return NULL;
  }

  // Do some parse checks.
  failure_msg = check_can_parse(callee_method);
  if (failure_msg != NULL) {
    if (PrintInlining) print_inlining(callee_method, caller_bci, failure_msg);
    return NULL;
  }

  // Check if inlining policy says no.
  // Work on a local copy so a rejected site does not clobber initial_wci.
  WarmCallInfo wci = *(initial_wci);
  failure_msg = try_to_inline(callee_method, caller_method, caller_bci, profile, &wci);
  if (failure_msg != NULL && C->log() != NULL) {
    C->log()->begin_elem("inline_fail reason='");
    C->log()->text("%s", failure_msg);
    C->log()->end_elem("'");
  }

#ifndef PRODUCT
  // Debug aid: report disagreements between the old inlining verdict and
  // the warm-call-info temperature.
  if (UseOldInlining && InlineWarmCalls
      && (PrintOpto || PrintOptoInlining || PrintInlining)) {
    bool cold = wci.is_cold();
    bool hot = !cold && wci.is_hot();
    bool old_cold = (failure_msg != NULL);
    if (old_cold != cold || (Verbose || WizardMode)) {
      tty->print(" OldInlining= %4s : %s\n WCI=",
                 old_cold ? "cold" : "hot", failure_msg ? failure_msg : "OK");
      wci.print();
    }
  }
#endif
  // Old inlining is a binary decision: force the temperature to an extreme.
  if (UseOldInlining) {
    if (failure_msg == NULL)
      wci = *(WarmCallInfo::always_hot());
    else
      wci = *(WarmCallInfo::always_cold());
  }
  if (!InlineWarmCalls) {
    if (!wci.is_cold() && !wci.is_hot()) {
      // Do not inline the warm calls.
      wci = *(WarmCallInfo::always_cold());
    }
  }

  if (!wci.is_cold()) {
    // In -UseOldInlining, the failure_msg may also be a success message.
    if (failure_msg == NULL) failure_msg = "inline (hot)";

    // Inline!
    if (PrintInlining) print_inlining(callee_method, caller_bci, failure_msg);
    if (UseOldInlining)
      build_inline_tree_for_callee(callee_method, jvms, caller_bci);
    if (InlineWarmCalls && !wci.is_hot())
      return new (C) WarmCallInfo(wci); // copy to heap
    return WarmCallInfo::always_hot();
  }

  // Do not inline
  if (failure_msg == NULL) failure_msg = "too cold to inline";
  if (PrintInlining) print_inlining(callee_method, caller_bci, failure_msg);
  return NULL;
}
520 //------------------------------compute_callee_frequency-----------------------
521 float InlineTree::compute_callee_frequency( int caller_bci ) const {
522 int count = method()->interpreter_call_site_count(caller_bci);
523 int invcnt = method()->interpreter_invocation_count();
524 float freq = (float)count/(float)invcnt;
525 // Call-site count / interpreter invocation count, scaled recursively.
526 // Always between 0.0 and 1.0. Represents the percentage of the method's
527 // total execution time used at this call site.
529 return freq;
530 }
//------------------------------build_inline_tree_for_callee-------------------
// Find or create the child InlineTree node for this call site and record it
// in _subtrees.  The child's depth budget is extended by one for method
// handle adapter frames / MH invokes so those synthetic frames do not count
// against the inline depth limit.
InlineTree *InlineTree::build_inline_tree_for_callee( ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
  // Scale the callee's frequency by this node's own cumulative ratio.
  float recur_frequency = _site_invoke_ratio * compute_callee_frequency(caller_bci);
  // Attempt inlining.
  InlineTree* old_ilt = callee_at(caller_bci, callee_method);
  if (old_ilt != NULL) {
    // Already built for this (bci, callee) pair; reuse it.
    return old_ilt;
  }
  int max_inline_level_adjust = 0;
  if (caller_jvms->method() != NULL) {
    if (caller_jvms->method()->is_method_handle_adapter())
      max_inline_level_adjust += 1; // don't count actions in MH or indy adapter frames
    else if (callee_method->is_method_handle_invoke()) {
      max_inline_level_adjust += 1; // don't count method handle calls from java.lang.invoke implem
    }
    if (max_inline_level_adjust != 0 && PrintInlining && (Verbose || WizardMode)) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr(" \\-> discounting inline depth");
    }
    if (max_inline_level_adjust != 0 && C->log()) {
      int id1 = C->log()->identify(caller_jvms->method());
      int id2 = C->log()->identify(callee_method);
      C->log()->elem("inline_level_discount caller='%d' callee='%d'", id1, id2);
    }
  }
  InlineTree* ilt = new InlineTree(C, this, callee_method, caller_jvms, caller_bci, recur_frequency, _max_inline_level + max_inline_level_adjust);
  _subtrees.append(ilt);

  NOT_PRODUCT( _count_inlines += 1; )

  return ilt;
}
566 //---------------------------------------callee_at-----------------------------
567 InlineTree *InlineTree::callee_at(int bci, ciMethod* callee) const {
568 for (int i = 0; i < _subtrees.length(); i++) {
569 InlineTree* sub = _subtrees.at(i);
570 if (sub->caller_bci() == bci && callee == sub->method()) {
571 return sub;
572 }
573 }
574 return NULL;
575 }
578 //------------------------------build_inline_tree_root-------------------------
579 InlineTree *InlineTree::build_inline_tree_root() {
580 Compile* C = Compile::current();
582 // Root of inline tree
583 InlineTree* ilt = new InlineTree(C, NULL, C->method(), NULL, -1, 1.0F, MaxInlineLevel);
585 return ilt;
586 }
//-------------------------find_subtree_from_root-----------------------------
// Given a jvms, which determines a call chain from the root method,
// find the corresponding inline tree.
// Note: This method will be removed or replaced as InlineTree goes away.
InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee, bool create_if_not_found) {
  InlineTree* iltp = root;
  // Depth 0 means there is no interpreted caller chain to descend through.
  uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
  for (uint d = 1; d <= depth; d++) {
    JVMState* jvmsp  = jvms->of_depth(d);
    // Select the corresponding subtree for this bci.
    assert(jvmsp->method() == iltp->method(), "tree still in sync");
    // At the innermost frame the callee is the one passed in; at outer
    // frames it is the method of the next-deeper frame.
    ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
    InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
    if (!sub) {
      // Only the leaf may be created on demand; a missing interior node
      // means the tree and the JVMS chain are out of sync.
      if (create_if_not_found && d == depth) {
        return iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
      }
      assert(sub != NULL, "should be a sub-ilt here");
      return NULL;
    }
    iltp = sub;
  }
  return iltp;
}
616 #ifndef PRODUCT
617 void InlineTree::print_impl(outputStream* st, int indent) const {
618 for (int i = 0; i < indent; i++) st->print(" ");
619 st->print(" @ %d ", caller_bci());
620 method()->print_short_name(st);
621 st->cr();
623 for (int i = 0 ; i < _subtrees.length(); i++) {
624 _subtrees.at(i)->print_impl(st, indent + 2);
625 }
626 }
628 void InlineTree::print_value_on(outputStream* st) const {
629 print_impl(st, 2);
630 }
631 #endif