src/share/vm/opto/bytecodeInfo.cpp

/*
 * Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciReplay.hpp"
#include "classfile/systemDictionary.hpp"
#include "classfile/vmSymbols.hpp"
#include "compiler/compileBroker.hpp"
#include "compiler/compileLog.hpp"
#include "interpreter/linkResolver.hpp"
#include "oops/objArrayKlass.hpp"
#include "opto/callGenerator.hpp"
#include "opto/parse.hpp"
#include "runtime/handles.inline.hpp"

//=============================================================================
//------------------------------InlineTree-------------------------------------
InlineTree::InlineTree(Compile* c,
                       const InlineTree *caller_tree, ciMethod* callee,
                       JVMState* caller_jvms, int caller_bci,
                       float site_invoke_ratio, int max_inline_level) :
  C(c),
  _caller_jvms(caller_jvms),
  _caller_tree((InlineTree*) caller_tree),
  _method(callee),
  _site_invoke_ratio(site_invoke_ratio),
  _max_inline_level(max_inline_level),
  _count_inline_bcs(method()->code_size_for_inlining()),
  _subtrees(c->comp_arena(), 2, 0, NULL),
  _msg(NULL)
{
#ifndef PRODUCT
  _count_inlines = 0;
  _forced_inline = false;
#endif
  if (_caller_jvms != NULL) {
    // Keep a private copy of the caller_jvms:
    _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms());
    _caller_jvms->set_bci(caller_jvms->bci());
    assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining");
  }
  assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS");
  assert((caller_tree == NULL ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter");
  assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter");
  // Update hierarchical counts, count_inline_bcs() and count_inlines()
  InlineTree *caller = (InlineTree *)caller_tree;
  for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) {
    caller->_count_inline_bcs += count_inline_bcs();
    NOT_PRODUCT(caller->_count_inlines++;)
  }
}

/**
 * Return true when EA is ON and a java constructor is called or
 * a super constructor is called from an inlined java constructor.
 * Also return true for boxing methods.
 */
static bool is_init_with_ea(ciMethod* callee_method,
                            ciMethod* caller_method, Compile* C) {
  if (!C->do_escape_analysis() || !EliminateAllocations) {
    return false; // EA is off
  }
  if (callee_method->is_initializer()) {
    return true; // constructor
  }
  if (caller_method->is_initializer() &&
      caller_method != C->method() &&
      caller_method->holder()->is_subclass_of(callee_method->holder())) {
    return true; // super constructor is called from inlined constructor
  }
  if (C->eliminate_boxing() && callee_method->is_boxing_method()) {
    return true;
  }
  return false;
}

/**
 * Force inlining unboxing accessor.
 */
static bool is_unboxing_method(ciMethod* callee_method, Compile* C) {
  return C->eliminate_boxing() && callee_method->is_unboxing_method();
}
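
// Both helpers above feed the inlining heuristics below: is_unboxing_method()
// and is_init_with_ea() let should_inline() raise the size limit for a call
// site, and let should_not_inline() waive some of its later heuristic
// rejections.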

// positive filter: should callee be inlined?
bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, ciCallProfile& profile,
                               WarmCallInfo* wci_result) {
  // Allows targeted inlining
  if(callee_method->should_inline()) {
    *wci_result = *(WarmCallInfo::always_hot());
    if (C->print_inlining() && Verbose) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined method is hot: ");
    }
    set_msg("force inline by CompilerOracle");
    _forced_inline = true;
    return true;
  }

#ifndef PRODUCT
  int inline_depth = inline_level()+1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
    set_msg("force inline by ciReplay");
    _forced_inline = true;
    return true;
  }
#endif

  int size = callee_method->code_size_for_inlining();

  // Check for too many throws (and not too huge)
  if(callee_method->interpreter_throwout_count() > InlineThrowCount &&
     size < InlineThrowMaxSize ) {
    wci_result->set_profit(wci_result->profit() * 100);
    if (C->print_inlining() && Verbose) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined method with many throws (throws=%d):", callee_method->interpreter_throwout_count());
    }
    set_msg("many throws");
    return true;
  }

  int default_max_inline_size = C->max_inline_size();
  int inline_small_code_size = InlineSmallCode / 4;
  int max_inline_size = default_max_inline_size;

  int call_site_count = method()->scale_count(profile.count());
  int invoke_count = method()->interpreter_invocation_count();

  assert(invoke_count != 0, "require invocation count greater than zero");
  int freq = call_site_count / invoke_count;
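  // Integer division: freq is the whole number of times this call site runs
  // per interpreted invocation of the enclosing method.  For example,
  // call_site_count == 80 with invoke_count == 10 gives freq == 8, while a
  // site reached at most once per invocation gives freq == 0 or 1.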

  // bump the max size if the call is frequent
  if ((freq >= InlineFrequencyRatio) ||
      (call_site_count >= InlineFrequencyCount) ||
      is_unboxing_method(callee_method, C) ||
      is_init_with_ea(callee_method, caller_method, C)) {

    max_inline_size = C->freq_inline_size();
    if (size <= max_inline_size && TraceFrequencyInlining) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr("Inlined frequent method (freq=%d count=%d):", freq, call_site_count);
      CompileTask::print_inline_indent(inline_level());
      callee_method->print();
      tty->cr();
    }
  } else {
    // Not hot. Check for medium-sized pre-existing nmethod at cold sites.
    if (callee_method->has_compiled_code() &&
        callee_method->instructions_size() > inline_small_code_size) {
      set_msg("already compiled into a medium method");
      return false;
    }
  }
  if (size > max_inline_size) {
    if (max_inline_size > default_max_inline_size) {
      set_msg("hot method too big");
    } else {
      set_msg("too big");
    }
    return false;
  }
  return true;
}


// negative filter: should callee NOT be inlined?
bool InlineTree::should_not_inline(ciMethod *callee_method,
                                   ciMethod* caller_method,
                                   JVMState* jvms,
                                   WarmCallInfo* wci_result) {

  const char* fail_msg = NULL;

  // First check all inlining restrictions which are required for correctness
  if ( callee_method->is_abstract()) {
    fail_msg = "abstract method"; // note: we allow ik->is_abstract()
  } else if (!callee_method->holder()->is_initialized()) {
    fail_msg = "method holder not initialized";
  } else if ( callee_method->is_native()) {
    fail_msg = "native method";
  } else if ( callee_method->dont_inline()) {
    fail_msg = "don't inline by annotation";
  }

  // one more inlining restriction
  if (fail_msg == NULL && callee_method->has_unloaded_classes_in_signature()) {
    fail_msg = "unloaded signature classes";
  }

  if (fail_msg != NULL) {
    set_msg(fail_msg);
    return true;
  }

  // ignore heuristic controls on inlining
  if (callee_method->should_inline()) {
    set_msg("force inline by CompilerOracle");
    return false;
  }

  if (callee_method->should_not_inline()) {
    set_msg("disallowed by CompilerOracle");
    return true;
  }

#ifndef PRODUCT
  int caller_bci = jvms->bci();
  int inline_depth = inline_level()+1;
  if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
    set_msg("force inline by ciReplay");
    return false;
  }

  if (ciReplay::should_not_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) {
    set_msg("disallowed by ciReplay");
    return true;
  }

  if (ciReplay::should_not_inline(callee_method)) {
    set_msg("disallowed by ciReplay");
    return true;
  }
#endif

  // Now perform checks which are heuristic

  if (is_unboxing_method(callee_method, C)) {
    // Inline unboxing methods.
    return false;
  }

  if (!callee_method->force_inline()) {
    if (callee_method->has_compiled_code() &&
        callee_method->instructions_size() > InlineSmallCode) {
      set_msg("already compiled into a big method");
      return true;
    }
  }

  // don't inline exception code unless the top method belongs to an
  // exception class
  if (caller_tree() != NULL &&
      callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) {
    const InlineTree *top = this;
    while (top->caller_tree() != NULL) top = top->caller_tree();
    ciInstanceKlass* k = top->method()->holder();
    if (!k->is_subclass_of(C->env()->Throwable_klass())) {
      set_msg("exception method");
      return true;
    }
  }

  // use frequency-based objections only for non-trivial methods
  if (callee_method->code_size() <= MaxTrivialSize) {
    return false;
  }

  // don't use counts with -Xcomp or CTW
  if (UseInterpreter && !CompileTheWorld) {

    if (!callee_method->has_compiled_code() &&
        !callee_method->was_executed_more_than(0)) {
      set_msg("never executed");
      return true;
    }

    if (is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis: inline all executed constructors
      return false;
    } else if (!callee_method->was_executed_more_than(MIN2(MinInliningThreshold,
                                                           CompileThreshold >> 1))) {
      set_msg("executed < MinInliningThreshold times");
      return true;
    }
  }

  return false;
}

//-----------------------------try_to_inline-----------------------------------
// return true if ok
// Relocated from "InliningClosure::try_to_inline"
bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method,
                               int caller_bci, JVMState* jvms, ciCallProfile& profile,
                               WarmCallInfo* wci_result, bool& should_delay) {

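  // The size, node-count and depth checks below share a pattern: exceeding a
  // limit normally rejects the call, but with IncrementalInline enabled a
  // forced candidate is kept and, outside the incremental-inlining pass, only
  // flagged via should_delay so the decision can be retried later.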
  if (ClipInlining && (int)count_inline_bcs() >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  _forced_inline = false; // Reset
  if (!should_inline(callee_method, caller_method, caller_bci, profile,
                     wci_result)) {
    return false;
  }
  if (should_not_inline(callee_method, caller_method, jvms, wci_result)) {
    return false;
  }

  if (InlineAccessors && callee_method->is_accessor()) {
    // accessor methods are not subject to any of the following limits.
    set_msg("accessor");
    return true;
  }

  // suppress a few checks for accessors and trivial methods
  if (callee_method->code_size() > MaxTrivialSize) {

    // don't inline into giant methods
    if (C->over_inlining_cutoff()) {
      if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form())
          || !IncrementalInline) {
        set_msg("NodeCountInliningCutoff");
        return false;
      } else {
        should_delay = true;
      }
    }

    if ((!UseInterpreter || CompileTheWorld) &&
        is_init_with_ea(callee_method, caller_method, C)) {
      // Escape Analysis stress testing when running Xcomp or CTW:
      // inline constructors even if they are not reached.
    } else if (forced_inline()) {
      // Inlining was forced by CompilerOracle or ciReplay
    } else if (profile.count() == 0) {
      // don't inline unreached call sites
      set_msg("call site not reached");
      return false;
    }
  }

  if (!C->do_inlining() && InlineAccessors) {
    set_msg("not an accessor");
    return false;
  }

  // Limit inlining depth in case inlining is forced or
  // _max_inline_level was increased to compensate for lambda forms.
  if (inline_level() > MaxForceInlineLevel) {
    set_msg("MaxForceInlineLevel");
    return false;
  }
  if (inline_level() > _max_inline_level) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("inlining too deep");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // detect direct and indirect recursive inlining
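  // In effect, a directly or indirectly recursive callee (or a compiled
  // lambda form re-entered on the same receiver) is inlined at most
  // MaxRecursiveInlineLevel times along any one JVMState chain; beyond that
  // the site is left as a real call.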
  {
    // count the current method and the callee
    const bool is_compiled_lambda_form = callee_method->is_compiled_lambda_form();
    int inline_level = 0;
    if (!is_compiled_lambda_form) {
      if (method() == callee_method) {
        inline_level++;
      }
    }
    // count callers of current method and callee
    Node* callee_argument0 = is_compiled_lambda_form ? jvms->map()->argument(jvms, 0)->uncast() : NULL;
    for (JVMState* j = jvms->caller(); j != NULL && j->has_method(); j = j->caller()) {
      if (j->method() == callee_method) {
        if (is_compiled_lambda_form) {
          // Since compiled lambda forms are heavily reused we allow recursive inlining. If it is truly
          // a recursion (using the same "receiver") we limit inlining otherwise we can easily blow the
          // compiler stack.
          Node* caller_argument0 = j->map()->argument(j, 0)->uncast();
          if (caller_argument0 == callee_argument0) {
            inline_level++;
          }
        } else {
          inline_level++;
        }
      }
    }
    if (inline_level > MaxRecursiveInlineLevel) {
      set_msg("recursive inlining is too deep");
      return false;
    }
  }

  int size = callee_method->code_size_for_inlining();

  if (ClipInlining && (int)count_inline_bcs() + size >= DesiredMethodLimit) {
    if (!callee_method->force_inline() || !IncrementalInline) {
      set_msg("size > DesiredMethodLimit");
      return false;
    } else if (!C->inlining_incrementally()) {
      should_delay = true;
    }
  }

  // ok, inline this method
  return true;
}

//------------------------------pass_initial_checks----------------------------
bool pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) {
  ciInstanceKlass *callee_holder = callee_method ? callee_method->holder() : NULL;
  // Check if a callee_method was suggested
  if( callee_method == NULL ) return false;
  // Check if klass of callee_method is loaded
  if( !callee_holder->is_loaded() ) return false;
  if( !callee_holder->is_initialized() ) return false;
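  // With -Xcomp or CompileTheWorld the interpreter may never have executed
  // this call site, so its constant-pool cache entry can still be unresolved;
  // the block below inspects the call bytecode and attempts resolution,
  // refusing to inline if that fails.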
  if( !UseInterpreter || CompileTheWorld /* running Xcomp or CTW */ ) {
    // Checks that constant pool's call site has been visited
    // stricter than callee_holder->is_initialized()
    ciBytecodeStream iter(caller_method);
    iter.force_bci(caller_bci);
    Bytecodes::Code call_bc = iter.cur_bc();
    // An invokedynamic instruction does not have a klass.
    if (call_bc != Bytecodes::_invokedynamic) {
      int index = iter.get_index_u2_cpcache();
      if (!caller_method->is_klass_loaded(index, true)) {
        return false;
      }
      // Try to do constant pool resolution if running Xcomp
      if( !caller_method->check_call(index, call_bc == Bytecodes::_invokestatic) ) {
        return false;
      }
    }
  }
  // We will attempt to see if a class/field/etc got properly loaded. If it
  // did not, it may attempt to throw an exception during our probing. Catch
  // and ignore such exceptions and do not attempt to compile the method.
  if( callee_method->should_exclude() ) return false;

  return true;
}

//------------------------------check_can_parse--------------------------------
const char* InlineTree::check_can_parse(ciMethod* callee) {
  // Certain methods cannot be parsed at all:
  if ( callee->is_native()) return "native method";
  if ( callee->is_abstract()) return "abstract method";
  if (!callee->can_be_compiled()) return "not compilable (disabled)";
  if (!callee->has_balanced_monitors()) return "not compilable (unbalanced monitors)";
  if ( callee->get_flow_analysis()->failing()) return "not compilable (flow analysis failed)";
  return NULL;
}

//------------------------------print_inlining---------------------------------
void InlineTree::print_inlining(ciMethod* callee_method, int caller_bci,
                                bool success) const {
  const char* inline_msg = msg();
  assert(inline_msg != NULL, "just checking");
  if (C->log() != NULL) {
    if (success) {
      C->log()->inline_success(inline_msg);
    } else {
      C->log()->inline_fail(inline_msg);
    }
  }
  if (C->print_inlining()) {
    C->print_inlining(callee_method, inline_level(), caller_bci, inline_msg);
    if (callee_method == NULL) tty->print(" callee not monotonic or profiled");
    if (Verbose && callee_method) {
      const InlineTree *top = this;
      while( top->caller_tree() != NULL ) { top = top->caller_tree(); }
      //tty->print(" bcs: %d+%d invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count());
    }
  }
}

//------------------------------ok_to_inline-----------------------------------
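// ok_to_inline() is the main entry point for the decision: a non-NULL result
// means "inline this call" (always_hot, or a heap-allocated warm copy when
// InlineWarmCalls keeps the call warm) and the callee gets its own subtree;
// NULL means the call remains a real invocation.  should_delay is set when
// the decision should instead be revisited during incremental inlining.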
WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci, bool& should_delay) {
  assert(callee_method != NULL, "caller checks for optimized virtual!");
  assert(!should_delay, "should be initialized to false");
#ifdef ASSERT
  // Make sure the incoming jvms has the same information content as me.
  // This means that we can eventually make this whole class AllStatic.
  if (jvms->caller() == NULL) {
    assert(_caller_jvms == NULL, "redundant instance state");
  } else {
    assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state");
  }
  assert(_method == jvms->method(), "redundant instance state");
#endif
  int caller_bci = jvms->bci();
  ciMethod* caller_method = jvms->method();

  // Do some initial checks.
  if (!pass_initial_checks(caller_method, caller_bci, callee_method)) {
    set_msg("failed initial checks");
    print_inlining(callee_method, caller_bci, false /* !success */);
    return NULL;
  }

  // Do some parse checks.
  set_msg(check_can_parse(callee_method));
  if (msg() != NULL) {
    print_inlining(callee_method, caller_bci, false /* !success */);
    return NULL;
  }

  // Check if inlining policy says no.
  WarmCallInfo wci = *(initial_wci);
  bool success = try_to_inline(callee_method, caller_method, caller_bci,
                               jvms, profile, &wci, should_delay);

#ifndef PRODUCT
  if (InlineWarmCalls && (PrintOpto || C->print_inlining())) {
    bool cold = wci.is_cold();
    bool hot = !cold && wci.is_hot();
    bool old_cold = !success;
    if (old_cold != cold || (Verbose || WizardMode)) {
      if (msg() == NULL) {
        set_msg("OK");
      }
      tty->print(" OldInlining= %4s : %s\n WCI=",
                 old_cold ? "cold" : "hot", msg());
      wci.print();
    }
  }
#endif
  if (success) {
    wci = *(WarmCallInfo::always_hot());
  } else {
    wci = *(WarmCallInfo::always_cold());
  }

  if (!InlineWarmCalls) {
    if (!wci.is_cold() && !wci.is_hot()) {
      // Do not inline the warm calls.
      wci = *(WarmCallInfo::always_cold());
    }
  }

  if (!wci.is_cold()) {
    // Inline!
    if (msg() == NULL) {
      set_msg("inline (hot)");
    }
    print_inlining(callee_method, caller_bci, true /* success */);
    build_inline_tree_for_callee(callee_method, jvms, caller_bci);
    if (InlineWarmCalls && !wci.is_hot())
      return new (C) WarmCallInfo(wci); // copy to heap
    return WarmCallInfo::always_hot();
  }

  // Do not inline
  if (msg() == NULL) {
    set_msg("too cold to inline");
  }
  print_inlining(callee_method, caller_bci, false /* !success */ );
  return NULL;
}

//------------------------------compute_callee_frequency-----------------------
float InlineTree::compute_callee_frequency( int caller_bci ) const {
  int count = method()->interpreter_call_site_count(caller_bci);
  int invcnt = method()->interpreter_invocation_count();
  float freq = (float)count/(float)invcnt;
  // Call-site count / interpreter invocation count, scaled recursively.
  // Always between 0.0 and 1.0. Represents the percentage of the method's
  // total execution time used at this call site.
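  // For example, a call site reached 25 times in a method interpreted 100
  // times yields freq == 0.25; the caller's own ratio is folded in later via
  // _site_invoke_ratio when the callee's subtree is built.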

  return freq;
}

//------------------------------build_inline_tree_for_callee-------------------
InlineTree *InlineTree::build_inline_tree_for_callee( ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) {
  float recur_frequency = _site_invoke_ratio * compute_callee_frequency(caller_bci);
  // Attempt inlining.
  InlineTree* old_ilt = callee_at(caller_bci, callee_method);
  if (old_ilt != NULL) {
    return old_ilt;
  }
  int max_inline_level_adjust = 0;
  if (caller_jvms->method() != NULL) {
    if (caller_jvms->method()->is_compiled_lambda_form())
      max_inline_level_adjust += 1; // don't count actions in MH or indy adapter frames
    else if (callee_method->is_method_handle_intrinsic() ||
             callee_method->is_compiled_lambda_form()) {
      max_inline_level_adjust += 1; // don't count method handle calls from java.lang.invoke implem
    }
    if (max_inline_level_adjust != 0 && C->print_inlining() && (Verbose || WizardMode)) {
      CompileTask::print_inline_indent(inline_level());
      tty->print_cr(" \\-> discounting inline depth");
    }
    if (max_inline_level_adjust != 0 && C->log()) {
      int id1 = C->log()->identify(caller_jvms->method());
      int id2 = C->log()->identify(callee_method);
      C->log()->elem("inline_level_discount caller='%d' callee='%d'", id1, id2);
    }
  }
  InlineTree* ilt = new InlineTree(C, this, callee_method, caller_jvms, caller_bci, recur_frequency, _max_inline_level + max_inline_level_adjust);
  _subtrees.append(ilt);

  NOT_PRODUCT( _count_inlines += 1; )

  return ilt;
}


//---------------------------------------callee_at-----------------------------
InlineTree *InlineTree::callee_at(int bci, ciMethod* callee) const {
  for (int i = 0; i < _subtrees.length(); i++) {
    InlineTree* sub = _subtrees.at(i);
    if (sub->caller_bci() == bci && callee == sub->method()) {
      return sub;
    }
  }
  return NULL;
}


//------------------------------build_inline_tree_root-------------------------
InlineTree *InlineTree::build_inline_tree_root() {
  Compile* C = Compile::current();

  // Root of inline tree
  InlineTree* ilt = new InlineTree(C, NULL, C->method(), NULL, -1, 1.0F, MaxInlineLevel);

  return ilt;
}


//-------------------------find_subtree_from_root-----------------------------
// Given a jvms, which determines a call chain from the root method,
// find the corresponding inline tree.
// Note: This method will be removed or replaced as InlineTree goes away.
InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee) {
  InlineTree* iltp = root;
  uint depth = jvms && jvms->has_method() ? jvms->depth() : 0;
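  // Depth 1 is the root method's own frame; each iteration of the loop below
  // descends one call level, keeping iltp in sync with the JVMState chain and
  // building the missing leaf subtree on demand at the final depth.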
  for (uint d = 1; d <= depth; d++) {
    JVMState* jvmsp = jvms->of_depth(d);
    // Select the corresponding subtree for this bci.
    assert(jvmsp->method() == iltp->method(), "tree still in sync");
    ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method();
    InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee);
    if (sub == NULL) {
      if (d == depth) {
        sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci());
      }
      guarantee(sub != NULL, "should be a sub-ilt here");
      return sub;
    }
    iltp = sub;
  }
  return iltp;
}

// Count number of nodes in this subtree
int InlineTree::count() const {
  int result = 1;
  for (int i = 0 ; i < _subtrees.length(); i++) {
    result += _subtrees.at(i)->count();
  }
  return result;
}

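// Pre-order dump of this subtree for ciReplay: each node contributes its
// inline_level, caller_bci and the method name in ciMethod ASCII form,
// followed by the records of its own callees.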
void InlineTree::dump_replay_data(outputStream* out) {
  out->print(" %d %d ", inline_level(), caller_bci());
  method()->dump_name_as_ascii(out);
  for (int i = 0 ; i < _subtrees.length(); i++) {
    _subtrees.at(i)->dump_replay_data(out);
  }
}


#ifndef PRODUCT
void InlineTree::print_impl(outputStream* st, int indent) const {
  for (int i = 0; i < indent; i++) st->print(" ");
  st->print(" @ %d", caller_bci());
  method()->print_short_name(st);
  st->cr();

  for (int i = 0 ; i < _subtrees.length(); i++) {
    _subtrees.at(i)->print_impl(st, indent + 2);
  }
}

void InlineTree::print_value_on(outputStream* st) const {
  print_impl(st, 2);
}
#endif
