src/share/vm/c1/c1_GraphBuilder.cpp

author:      johnc
date:        Thu, 07 Apr 2011 09:53:20 -0700
changeset:   2781:e1162778c1c8
parent:      2658:c7f3d0b4570f
child:       2784:92add02409c9
permissions: -rw-r--r--

7009266: G1: assert(obj->is_oop_or_null(true )) failed: Error
Summary: A referent object that is only weakly reachable at the start of concurrent marking but is re-attached to the strongly reachable object graph during marking may not be marked as live. This can cause the reference object to be processed prematurely, leaving dangling pointers to the referent object. Implement a read barrier for the java.lang.ref.Reference::referent field by intrinsifying the Reference.get() method, and by intercepting accesses through JNI, reflection, and Unsafe, so that when a non-null referent object is read it is also logged in an SATB buffer.
Reviewed-by: kvn, iveresov, never, tonyp, dholmes
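
The change effectively adds a pre-barrier on loads of the referent field. A minimal sketch of the intended semantics in C++, assuming hypothetical helper names (marking_is_active, satb_enqueue and load_oop_field are illustrative stand-ins, not the actual HotSpot entry points):

#include <cstddef>

typedef void* oop;  // simplified stand-in for HotSpot's oop type

// Hypothetical helpers standing in for the real SATB machinery.
extern bool marking_is_active();                             // is concurrent marking running?
extern void satb_enqueue(oop obj);                           // record obj in the thread's SATB buffer
extern oop  load_oop_field(oop obj, std::ptrdiff_t offset);  // plain oop field load

// What the intrinsified Reference.get() conceptually does: perform the
// ordinary load, then log a non-null referent so that concurrent marking
// treats it as live even if it was only weakly reachable at mark start.
oop reference_get(oop reference, std::ptrdiff_t referent_offset) {
  oop referent = load_oop_field(reference, referent_offset);
  if (referent != NULL && marking_is_active()) {
    satb_enqueue(referent);
  }
  return referent;
}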

duke@435 1 /*
never@2486 2 * Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
duke@435 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
duke@435 4 *
duke@435 5 * This code is free software; you can redistribute it and/or modify it
duke@435 6 * under the terms of the GNU General Public License version 2 only, as
duke@435 7 * published by the Free Software Foundation.
duke@435 8 *
duke@435 9 * This code is distributed in the hope that it will be useful, but WITHOUT
duke@435 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
duke@435 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
duke@435 12 * version 2 for more details (a copy is included in the LICENSE file that
duke@435 13 * accompanied this code).
duke@435 14 *
duke@435 15 * You should have received a copy of the GNU General Public License version
duke@435 16 * 2 along with this work; if not, write to the Free Software Foundation,
duke@435 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
duke@435 18 *
trims@1907 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
trims@1907 20 * or visit www.oracle.com if you need additional information or have any
trims@1907 21 * questions.
duke@435 22 *
duke@435 23 */
duke@435 24
stefank@2314 25 #include "precompiled.hpp"
stefank@2314 26 #include "c1/c1_CFGPrinter.hpp"
stefank@2314 27 #include "c1/c1_Canonicalizer.hpp"
stefank@2314 28 #include "c1/c1_Compilation.hpp"
stefank@2314 29 #include "c1/c1_GraphBuilder.hpp"
stefank@2314 30 #include "c1/c1_InstructionPrinter.hpp"
stefank@2314 31 #include "ci/ciField.hpp"
stefank@2314 32 #include "ci/ciKlass.hpp"
stefank@2314 33 #include "interpreter/bytecode.hpp"
stefank@2314 34 #include "runtime/sharedRuntime.hpp"
stefank@2314 35 #include "utilities/bitMap.inline.hpp"
duke@435 36
duke@435 37 class BlockListBuilder VALUE_OBJ_CLASS_SPEC {
duke@435 38 private:
duke@435 39 Compilation* _compilation;
duke@435 40 IRScope* _scope;
duke@435 41
duke@435 42 BlockList _blocks; // internal list of all blocks
duke@435 43 BlockList* _bci2block; // mapping from bci to blocks for GraphBuilder
duke@435 44
duke@435 45 // fields used by mark_loops
duke@435 46 BitMap _active; // for iteration of control flow graph
duke@435 47 BitMap _visited; // for iteration of control flow graph
duke@435 48 intArray _loop_map; // caches the information if a block is contained in a loop
duke@435 49 int _next_loop_index; // next free loop number
duke@435 50 int _next_block_number; // for reverse postorder numbering of blocks
duke@435 51
duke@435 52 // accessors
duke@435 53 Compilation* compilation() const { return _compilation; }
duke@435 54 IRScope* scope() const { return _scope; }
duke@435 55 ciMethod* method() const { return scope()->method(); }
duke@435 56 XHandlers* xhandlers() const { return scope()->xhandlers(); }
duke@435 57
duke@435 58 // unified bailout support
duke@435 59 void bailout(const char* msg) const { compilation()->bailout(msg); }
duke@435 60 bool bailed_out() const { return compilation()->bailed_out(); }
duke@435 61
duke@435 62 // helper functions
duke@435 63 BlockBegin* make_block_at(int bci, BlockBegin* predecessor);
duke@435 64 void handle_exceptions(BlockBegin* current, int cur_bci);
duke@435 65 void handle_jsr(BlockBegin* current, int sr_bci, int next_bci);
duke@435 66 void store_one(BlockBegin* current, int local);
duke@435 67 void store_two(BlockBegin* current, int local);
duke@435 68 void set_entries(int osr_bci);
duke@435 69 void set_leaders();
duke@435 70
duke@435 71 void make_loop_header(BlockBegin* block);
duke@435 72 void mark_loops();
duke@435 73 int mark_loops(BlockBegin* b, bool in_subroutine);
duke@435 74
duke@435 75 // debugging
duke@435 76 #ifndef PRODUCT
duke@435 77 void print();
duke@435 78 #endif
duke@435 79
duke@435 80 public:
duke@435 81 // creation
duke@435 82 BlockListBuilder(Compilation* compilation, IRScope* scope, int osr_bci);
duke@435 83
duke@435 84 // accessors for GraphBuilder
duke@435 85 BlockList* bci2block() const { return _bci2block; }
duke@435 86 };
duke@435 87
duke@435 88
duke@435 89 // Implementation of BlockListBuilder
duke@435 90
duke@435 91 BlockListBuilder::BlockListBuilder(Compilation* compilation, IRScope* scope, int osr_bci)
duke@435 92 : _compilation(compilation)
duke@435 93 , _scope(scope)
duke@435 94 , _blocks(16)
duke@435 95 , _bci2block(new BlockList(scope->method()->code_size(), NULL))
duke@435 96 , _next_block_number(0)
duke@435 97 , _active() // size not known yet
duke@435 98 , _visited() // size not known yet
duke@435 99 , _next_loop_index(0)
duke@435 100 , _loop_map() // size not known yet
duke@435 101 {
duke@435 102 set_entries(osr_bci);
duke@435 103 set_leaders();
duke@435 104 CHECK_BAILOUT();
duke@435 105
duke@435 106 mark_loops();
duke@435 107 NOT_PRODUCT(if (PrintInitialBlockList) print());
duke@435 108
duke@435 109 #ifndef PRODUCT
duke@435 110 if (PrintCFGToFile) {
duke@435 111 stringStream title;
duke@435 112 title.print("BlockListBuilder ");
duke@435 113 scope->method()->print_name(&title);
duke@435 114 CFGPrinter::print_cfg(_bci2block, title.as_string(), false, false);
duke@435 115 }
duke@435 116 #endif
duke@435 117 }
duke@435 118
duke@435 119
duke@435 120 void BlockListBuilder::set_entries(int osr_bci) {
duke@435 121 // generate start blocks
duke@435 122 BlockBegin* std_entry = make_block_at(0, NULL);
duke@435 123 if (scope()->caller() == NULL) {
duke@435 124 std_entry->set(BlockBegin::std_entry_flag);
duke@435 125 }
duke@435 126 if (osr_bci != -1) {
duke@435 127 BlockBegin* osr_entry = make_block_at(osr_bci, NULL);
duke@435 128 osr_entry->set(BlockBegin::osr_entry_flag);
duke@435 129 }
duke@435 130
duke@435 131 // generate exception entry blocks
duke@435 132 XHandlers* list = xhandlers();
duke@435 133 const int n = list->length();
duke@435 134 for (int i = 0; i < n; i++) {
duke@435 135 XHandler* h = list->handler_at(i);
duke@435 136 BlockBegin* entry = make_block_at(h->handler_bci(), NULL);
duke@435 137 entry->set(BlockBegin::exception_entry_flag);
duke@435 138 h->set_entry_block(entry);
duke@435 139 }
duke@435 140 }
duke@435 141
duke@435 142
duke@435 143 BlockBegin* BlockListBuilder::make_block_at(int cur_bci, BlockBegin* predecessor) {
duke@435 144 assert(method()->bci_block_start().at(cur_bci), "wrong block starts of MethodLivenessAnalyzer");
duke@435 145
duke@435 146 BlockBegin* block = _bci2block->at(cur_bci);
duke@435 147 if (block == NULL) {
duke@435 148 block = new BlockBegin(cur_bci);
duke@435 149 block->init_stores_to_locals(method()->max_locals());
duke@435 150 _bci2block->at_put(cur_bci, block);
duke@435 151 _blocks.append(block);
duke@435 152
duke@435 153 assert(predecessor == NULL || predecessor->bci() < cur_bci, "targets for backward branches must already exist");
duke@435 154 }
duke@435 155
duke@435 156 if (predecessor != NULL) {
duke@435 157 if (block->is_set(BlockBegin::exception_entry_flag)) {
duke@435 158 BAILOUT_("Exception handler can be reached by both normal and exceptional control flow", block);
duke@435 159 }
duke@435 160
duke@435 161 predecessor->add_successor(block);
duke@435 162 block->increment_total_preds();
duke@435 163 }
duke@435 164
duke@435 165 return block;
duke@435 166 }
duke@435 167
duke@435 168
duke@435 169 inline void BlockListBuilder::store_one(BlockBegin* current, int local) {
duke@435 170 current->stores_to_locals().set_bit(local);
duke@435 171 }
duke@435 172 inline void BlockListBuilder::store_two(BlockBegin* current, int local) {
duke@435 173 store_one(current, local);
duke@435 174 store_one(current, local + 1);
duke@435 175 }
duke@435 176
duke@435 177
duke@435 178 void BlockListBuilder::handle_exceptions(BlockBegin* current, int cur_bci) {
duke@435 179 // Draws edges from a block to its exception handlers
duke@435 180 XHandlers* list = xhandlers();
duke@435 181 const int n = list->length();
duke@435 182
duke@435 183 for (int i = 0; i < n; i++) {
duke@435 184 XHandler* h = list->handler_at(i);
duke@435 185
duke@435 186 if (h->covers(cur_bci)) {
duke@435 187 BlockBegin* entry = h->entry_block();
duke@435 188 assert(entry != NULL && entry == _bci2block->at(h->handler_bci()), "entry must be set");
duke@435 189 assert(entry->is_set(BlockBegin::exception_entry_flag), "flag must be set");
duke@435 190
duke@435 191 // add each exception handler only once
duke@435 192 if (!current->is_successor(entry)) {
duke@435 193 current->add_successor(entry);
duke@435 194 entry->increment_total_preds();
duke@435 195 }
duke@435 196
duke@435 197 // stop when reaching catchall
duke@435 198 if (h->catch_type() == 0) break;
duke@435 199 }
duke@435 200 }
duke@435 201 }
duke@435 202
duke@435 203 void BlockListBuilder::handle_jsr(BlockBegin* current, int sr_bci, int next_bci) {
duke@435 204 // start a new block after jsr-bytecode and link this block into cfg
duke@435 205 make_block_at(next_bci, current);
duke@435 206
duke@435 207 // start a new block at the subroutine entry and mark it with a special flag
duke@435 208 BlockBegin* sr_block = make_block_at(sr_bci, current);
duke@435 209 if (!sr_block->is_set(BlockBegin::subroutine_entry_flag)) {
duke@435 210 sr_block->set(BlockBegin::subroutine_entry_flag);
duke@435 211 }
duke@435 212 }
duke@435 213
duke@435 214
duke@435 215 void BlockListBuilder::set_leaders() {
duke@435 216 bool has_xhandlers = xhandlers()->has_handlers();
duke@435 217 BlockBegin* current = NULL;
duke@435 218
duke@435 219 // The information about which bcis start a new block simplifies the analysis
duke@435 220 // Without it, backward branches could jump to a bci where no block was created
duke@435 221 // during bytecode iteration. This would require the creation of a new block at the
duke@435 222 // branch target and a modification of the successor lists.
duke@435 223 BitMap bci_block_start = method()->bci_block_start();
duke@435 224
duke@435 225 ciBytecodeStream s(method());
duke@435 226 while (s.next() != ciBytecodeStream::EOBC()) {
duke@435 227 int cur_bci = s.cur_bci();
duke@435 228
duke@435 229 if (bci_block_start.at(cur_bci)) {
duke@435 230 current = make_block_at(cur_bci, current);
duke@435 231 }
duke@435 232 assert(current != NULL, "must have current block");
duke@435 233
duke@435 234 if (has_xhandlers && GraphBuilder::can_trap(method(), s.cur_bc())) {
duke@435 235 handle_exceptions(current, cur_bci);
duke@435 236 }
duke@435 237
duke@435 238 switch (s.cur_bc()) {
duke@435 239 // track stores to local variables for selective creation of phi functions
duke@435 240 case Bytecodes::_iinc: store_one(current, s.get_index()); break;
duke@435 241 case Bytecodes::_istore: store_one(current, s.get_index()); break;
duke@435 242 case Bytecodes::_lstore: store_two(current, s.get_index()); break;
duke@435 243 case Bytecodes::_fstore: store_one(current, s.get_index()); break;
duke@435 244 case Bytecodes::_dstore: store_two(current, s.get_index()); break;
duke@435 245 case Bytecodes::_astore: store_one(current, s.get_index()); break;
duke@435 246 case Bytecodes::_istore_0: store_one(current, 0); break;
duke@435 247 case Bytecodes::_istore_1: store_one(current, 1); break;
duke@435 248 case Bytecodes::_istore_2: store_one(current, 2); break;
duke@435 249 case Bytecodes::_istore_3: store_one(current, 3); break;
duke@435 250 case Bytecodes::_lstore_0: store_two(current, 0); break;
duke@435 251 case Bytecodes::_lstore_1: store_two(current, 1); break;
duke@435 252 case Bytecodes::_lstore_2: store_two(current, 2); break;
duke@435 253 case Bytecodes::_lstore_3: store_two(current, 3); break;
duke@435 254 case Bytecodes::_fstore_0: store_one(current, 0); break;
duke@435 255 case Bytecodes::_fstore_1: store_one(current, 1); break;
duke@435 256 case Bytecodes::_fstore_2: store_one(current, 2); break;
duke@435 257 case Bytecodes::_fstore_3: store_one(current, 3); break;
duke@435 258 case Bytecodes::_dstore_0: store_two(current, 0); break;
duke@435 259 case Bytecodes::_dstore_1: store_two(current, 1); break;
duke@435 260 case Bytecodes::_dstore_2: store_two(current, 2); break;
duke@435 261 case Bytecodes::_dstore_3: store_two(current, 3); break;
duke@435 262 case Bytecodes::_astore_0: store_one(current, 0); break;
duke@435 263 case Bytecodes::_astore_1: store_one(current, 1); break;
duke@435 264 case Bytecodes::_astore_2: store_one(current, 2); break;
duke@435 265 case Bytecodes::_astore_3: store_one(current, 3); break;
duke@435 266
duke@435 267 // track bytecodes that affect the control flow
duke@435 268 case Bytecodes::_athrow: // fall through
duke@435 269 case Bytecodes::_ret: // fall through
duke@435 270 case Bytecodes::_ireturn: // fall through
duke@435 271 case Bytecodes::_lreturn: // fall through
duke@435 272 case Bytecodes::_freturn: // fall through
duke@435 273 case Bytecodes::_dreturn: // fall through
duke@435 274 case Bytecodes::_areturn: // fall through
duke@435 275 case Bytecodes::_return:
duke@435 276 current = NULL;
duke@435 277 break;
duke@435 278
duke@435 279 case Bytecodes::_ifeq: // fall through
duke@435 280 case Bytecodes::_ifne: // fall through
duke@435 281 case Bytecodes::_iflt: // fall through
duke@435 282 case Bytecodes::_ifge: // fall through
duke@435 283 case Bytecodes::_ifgt: // fall through
duke@435 284 case Bytecodes::_ifle: // fall through
duke@435 285 case Bytecodes::_if_icmpeq: // fall through
duke@435 286 case Bytecodes::_if_icmpne: // fall through
duke@435 287 case Bytecodes::_if_icmplt: // fall through
duke@435 288 case Bytecodes::_if_icmpge: // fall through
duke@435 289 case Bytecodes::_if_icmpgt: // fall through
duke@435 290 case Bytecodes::_if_icmple: // fall through
duke@435 291 case Bytecodes::_if_acmpeq: // fall through
duke@435 292 case Bytecodes::_if_acmpne: // fall through
duke@435 293 case Bytecodes::_ifnull: // fall through
duke@435 294 case Bytecodes::_ifnonnull:
duke@435 295 make_block_at(s.next_bci(), current);
duke@435 296 make_block_at(s.get_dest(), current);
duke@435 297 current = NULL;
duke@435 298 break;
duke@435 299
duke@435 300 case Bytecodes::_goto:
duke@435 301 make_block_at(s.get_dest(), current);
duke@435 302 current = NULL;
duke@435 303 break;
duke@435 304
duke@435 305 case Bytecodes::_goto_w:
duke@435 306 make_block_at(s.get_far_dest(), current);
duke@435 307 current = NULL;
duke@435 308 break;
duke@435 309
duke@435 310 case Bytecodes::_jsr:
duke@435 311 handle_jsr(current, s.get_dest(), s.next_bci());
duke@435 312 current = NULL;
duke@435 313 break;
duke@435 314
duke@435 315 case Bytecodes::_jsr_w:
duke@435 316 handle_jsr(current, s.get_far_dest(), s.next_bci());
duke@435 317 current = NULL;
duke@435 318 break;
duke@435 319
duke@435 320 case Bytecodes::_tableswitch: {
duke@435 321 // set block for each case
never@2462 322 Bytecode_tableswitch sw(&s);
never@2462 323 int l = sw.length();
duke@435 324 for (int i = 0; i < l; i++) {
never@2462 325 make_block_at(cur_bci + sw.dest_offset_at(i), current);
duke@435 326 }
never@2462 327 make_block_at(cur_bci + sw.default_offset(), current);
duke@435 328 current = NULL;
duke@435 329 break;
duke@435 330 }
duke@435 331
duke@435 332 case Bytecodes::_lookupswitch: {
duke@435 333 // set block for each case
never@2462 334 Bytecode_lookupswitch sw(&s);
never@2462 335 int l = sw.number_of_pairs();
duke@435 336 for (int i = 0; i < l; i++) {
never@2462 337 make_block_at(cur_bci + sw.pair_at(i).offset(), current);
duke@435 338 }
never@2462 339 make_block_at(cur_bci + sw.default_offset(), current);
duke@435 340 current = NULL;
duke@435 341 break;
duke@435 342 }
duke@435 343 }
duke@435 344 }
duke@435 345 }
duke@435 346
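// Illustrative example (editorial sketch, not part of the original source):
// for a method body such as
//
//   0: iload_1
//   1: ifeq 8       // branch: blocks created at next_bci 4 and dest 8
//   4: iconst_0
//   5: goto 9       // block created at dest 9
//   8: iconst_1
//   9: ireturn
//
// set_leaders() creates blocks with leaders at bci 0, 4, 8 and 9, links each
// predecessor via make_block_at(), and clears `current` after every
// control-transfer bytecode so the next leader starts a fresh block.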
duke@435 347
duke@435 348 void BlockListBuilder::mark_loops() {
duke@435 349 ResourceMark rm;
duke@435 350
duke@435 351 _active = BitMap(BlockBegin::number_of_blocks()); _active.clear();
duke@435 352 _visited = BitMap(BlockBegin::number_of_blocks()); _visited.clear();
duke@435 353 _loop_map = intArray(BlockBegin::number_of_blocks(), 0);
duke@435 354 _next_loop_index = 0;
duke@435 355 _next_block_number = _blocks.length();
duke@435 356
duke@435 357 // recursively iterate the control flow graph
duke@435 358 mark_loops(_bci2block->at(0), false);
duke@435 359 assert(_next_block_number >= 0, "invalid block numbers");
duke@435 360 }
duke@435 361
duke@435 362 void BlockListBuilder::make_loop_header(BlockBegin* block) {
duke@435 363 if (block->is_set(BlockBegin::exception_entry_flag)) {
duke@435 364 // exception edges may look like loops but don't mark them as such
duke@435 365 // since it screws up block ordering.
duke@435 366 return;
duke@435 367 }
duke@435 368 if (!block->is_set(BlockBegin::parser_loop_header_flag)) {
duke@435 369 block->set(BlockBegin::parser_loop_header_flag);
duke@435 370
duke@435 371 assert(_loop_map.at(block->block_id()) == 0, "must not be set yet");
duke@435 372 assert(0 <= _next_loop_index && _next_loop_index < BitsPerInt, "_next_loop_index is used as a bit-index in integer");
duke@435 373 _loop_map.at_put(block->block_id(), 1 << _next_loop_index);
duke@435 374 if (_next_loop_index < 31) _next_loop_index++;
duke@435 375 } else {
duke@435 376 // block already marked as loop header
roland@1495 377 assert(is_power_of_2((unsigned int)_loop_map.at(block->block_id())), "exactly one bit must be set");
duke@435 378 }
duke@435 379 }
duke@435 380
duke@435 381 int BlockListBuilder::mark_loops(BlockBegin* block, bool in_subroutine) {
duke@435 382 int block_id = block->block_id();
duke@435 383
duke@435 384 if (_visited.at(block_id)) {
duke@435 385 if (_active.at(block_id)) {
duke@435 386 // reached block via backward branch
duke@435 387 make_loop_header(block);
duke@435 388 }
duke@435 389 // return cached loop information for this block
duke@435 390 return _loop_map.at(block_id);
duke@435 391 }
duke@435 392
duke@435 393 if (block->is_set(BlockBegin::subroutine_entry_flag)) {
duke@435 394 in_subroutine = true;
duke@435 395 }
duke@435 396
duke@435 397 // set active and visited bits before successors are processed
duke@435 398 _visited.set_bit(block_id);
duke@435 399 _active.set_bit(block_id);
duke@435 400
duke@435 401 intptr_t loop_state = 0;
duke@435 402 for (int i = block->number_of_sux() - 1; i >= 0; i--) {
duke@435 403 // recursively process all successors
duke@435 404 loop_state |= mark_loops(block->sux_at(i), in_subroutine);
duke@435 405 }
duke@435 406
duke@435 407 // clear active-bit after all successors are processed
duke@435 408 _active.clear_bit(block_id);
duke@435 409
duke@435 410 // reverse-post-order numbering of all blocks
duke@435 411 block->set_depth_first_number(_next_block_number);
duke@435 412 _next_block_number--;
duke@435 413
duke@435 414 if (loop_state != 0 || in_subroutine ) {
duke@435 415 // block is contained at least in one loop, so phi functions are necessary
duke@435 416 // phi functions are also necessary for all locals stored in a subroutine
duke@435 417 scope()->requires_phi_function().set_union(block->stores_to_locals());
duke@435 418 }
duke@435 419
duke@435 420 if (block->is_set(BlockBegin::parser_loop_header_flag)) {
duke@435 421 int header_loop_state = _loop_map.at(block_id);
duke@435 422 assert(is_power_of_2((unsigned)header_loop_state), "exactly one bit must be set");
duke@435 423
duke@435 424 // If the highest bit is set (i.e. when integer value is negative), the method
duke@435 425 // has 32 or more loops. This bit is never cleared because it is used for multiple loops.
duke@435 426 if (header_loop_state >= 0) {
duke@435 427 clear_bits(loop_state, header_loop_state);
duke@435 428 }
duke@435 429 }
duke@435 430
duke@435 431 // cache and return loop information for this block
duke@435 432 _loop_map.at_put(block_id, loop_state);
duke@435 433 return loop_state;
duke@435 434 }
duke@435 435
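// Illustrative example (editorial sketch): suppose the depth-first walk
// discovers two loop headers; the first gets loop bit 1 << 0, the second
// 1 << 1. A block contained in both loops returns loop_state 0b11. When the
// recursion unwinds through a header, clear_bits() removes that header's own
// bit, so its loop no longer propagates outward, and blocks outside all
// loops end up with loop_state 0.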
duke@435 436
duke@435 437 #ifndef PRODUCT
duke@435 438
duke@435 439 int compare_depth_first(BlockBegin** a, BlockBegin** b) {
duke@435 440 return (*a)->depth_first_number() - (*b)->depth_first_number();
duke@435 441 }
duke@435 442
duke@435 443 void BlockListBuilder::print() {
duke@435 444 tty->print("----- initial block list of BlockListBuilder for method ");
duke@435 445 method()->print_short_name();
duke@435 446 tty->cr();
duke@435 447
duke@435 448 // better readability if blocks are sorted in processing order
duke@435 449 _blocks.sort(compare_depth_first);
duke@435 450
duke@435 451 for (int i = 0; i < _blocks.length(); i++) {
duke@435 452 BlockBegin* cur = _blocks.at(i);
duke@435 453 tty->print("%4d: B%-4d bci: %-4d preds: %-4d ", cur->depth_first_number(), cur->block_id(), cur->bci(), cur->total_preds());
duke@435 454
duke@435 455 tty->print(cur->is_set(BlockBegin::std_entry_flag) ? " std" : " ");
duke@435 456 tty->print(cur->is_set(BlockBegin::osr_entry_flag) ? " osr" : " ");
duke@435 457 tty->print(cur->is_set(BlockBegin::exception_entry_flag) ? " ex" : " ");
duke@435 458 tty->print(cur->is_set(BlockBegin::subroutine_entry_flag) ? " sr" : " ");
duke@435 459 tty->print(cur->is_set(BlockBegin::parser_loop_header_flag) ? " lh" : " ");
duke@435 460
duke@435 461 if (cur->number_of_sux() > 0) {
duke@435 462 tty->print(" sux: ");
duke@435 463 for (int j = 0; j < cur->number_of_sux(); j++) {
duke@435 464 BlockBegin* sux = cur->sux_at(j);
duke@435 465 tty->print("B%d ", sux->block_id());
duke@435 466 }
duke@435 467 }
duke@435 468 tty->cr();
duke@435 469 }
duke@435 470 }
duke@435 471
duke@435 472 #endif
duke@435 473
duke@435 474
duke@435 475 // A simple growable array of Values indexed by ciFields
duke@435 476 class FieldBuffer: public CompilationResourceObj {
duke@435 477 private:
duke@435 478 GrowableArray<Value> _values;
duke@435 479
duke@435 480 public:
duke@435 481 FieldBuffer() {}
duke@435 482
duke@435 483 void kill() {
duke@435 484 _values.trunc_to(0);
duke@435 485 }
duke@435 486
duke@435 487 Value at(ciField* field) {
duke@435 488 assert(field->holder()->is_loaded(), "must be a loaded field");
duke@435 489 int offset = field->offset();
duke@435 490 if (offset < _values.length()) {
duke@435 491 return _values.at(offset);
duke@435 492 } else {
duke@435 493 return NULL;
duke@435 494 }
duke@435 495 }
duke@435 496
duke@435 497 void at_put(ciField* field, Value value) {
duke@435 498 assert(field->holder()->is_loaded(), "must be a loaded field");
duke@435 499 int offset = field->offset();
duke@435 500 _values.at_put_grow(offset, value, NULL);
duke@435 501 }
duke@435 502
duke@435 503 };
duke@435 504
duke@435 505
duke@435 506 // MemoryBuffer is a fairly simple model of the current state of memory.
duke@435 507 // It partitions memory into several pieces. The first piece is
duke@435 508 // generic memory where little is known about the owner of the memory.
duke@435 509 // This is conceptually represented by the tuple <O, F, V> which says
duke@435 510 // that the field F of object O has value V. This is flattened so
duke@435 511 // that F is represented by the offset of the field and the parallel
duke@435 512 // arrays _objects and _values are used for O and V. Loads of O.F can
duke@435 513 // simply use V. Newly allocated objects are kept in a separate list
duke@435 514 // along with a parallel array for each object which represents the
duke@435 515 // current value of its fields. Stores of the default value to fields
duke@435 516 // which have never been stored to before are eliminated since they
duke@435 517 // are redundant. Once newly allocated objects are stored into
duke@435 518 // another object or they are passed out of the current compile they
duke@435 519 // are treated like generic memory.
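// For example, for "A a = new A(); a.f = 0;" the store of the default value
// 0 to the freshly allocated object is eliminated as redundant, while a
// store of a non-default value v is recorded so that a later load of a.f can
// be folded to v -- until `a` is stored into another object or escapes the
// current compile, at which point it is treated as generic memory again.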
duke@435 520
duke@435 521 class MemoryBuffer: public CompilationResourceObj {
duke@435 522 private:
duke@435 523 FieldBuffer _values;
duke@435 524 GrowableArray<Value> _objects;
duke@435 525 GrowableArray<Value> _newobjects;
duke@435 526 GrowableArray<FieldBuffer*> _fields;
duke@435 527
duke@435 528 public:
duke@435 529 MemoryBuffer() {}
duke@435 530
duke@435 531 StoreField* store(StoreField* st) {
duke@435 532 if (!EliminateFieldAccess) {
duke@435 533 return st;
duke@435 534 }
duke@435 535
duke@435 536 Value object = st->obj();
duke@435 537 Value value = st->value();
duke@435 538 ciField* field = st->field();
duke@435 539 if (field->holder()->is_loaded()) {
duke@435 540 int offset = field->offset();
duke@435 541 int index = _newobjects.find(object);
duke@435 542 if (index != -1) {
duke@435 543 // newly allocated object with no other stores performed on this field
duke@435 544 FieldBuffer* buf = _fields.at(index);
duke@435 545 if (buf->at(field) == NULL && is_default_value(value)) {
duke@435 546 #ifndef PRODUCT
duke@435 547 if (PrintIRDuringConstruction && Verbose) {
duke@435 548 tty->print_cr("Eliminated store for object %d:", index);
duke@435 549 st->print_line();
duke@435 550 }
duke@435 551 #endif
duke@435 552 return NULL;
duke@435 553 } else {
duke@435 554 buf->at_put(field, value);
duke@435 555 }
duke@435 556 } else {
duke@435 557 _objects.at_put_grow(offset, object, NULL);
duke@435 558 _values.at_put(field, value);
duke@435 559 }
duke@435 560
duke@435 561 store_value(value);
duke@435 562 } else {
duke@435 563 // if we held onto field names we could alias based on names but
duke@435 564 // we don't know what's being stored to, so kill it all.
duke@435 565 kill();
duke@435 566 }
duke@435 567 return st;
duke@435 568 }
duke@435 569
duke@435 570
duke@435 571 // return true if this value corresponds to the default value of a field.
duke@435 572 bool is_default_value(Value value) {
duke@435 573 Constant* con = value->as_Constant();
duke@435 574 if (con) {
duke@435 575 switch (con->type()->tag()) {
duke@435 576 case intTag: return con->type()->as_IntConstant()->value() == 0;
duke@435 577 case longTag: return con->type()->as_LongConstant()->value() == 0;
duke@435 578 case floatTag: return jint_cast(con->type()->as_FloatConstant()->value()) == 0;
duke@435 579 case doubleTag: return jlong_cast(con->type()->as_DoubleConstant()->value()) == jlong_cast(0);
duke@435 580 case objectTag: return con->type() == objectNull;
duke@435 581 default: ShouldNotReachHere();
duke@435 582 }
duke@435 583 }
duke@435 584 return false;
duke@435 585 }
duke@435 586
duke@435 587
duke@435 588 // return either the actual value of a load or the load itself
duke@435 589 Value load(LoadField* load) {
duke@435 590 if (!EliminateFieldAccess) {
duke@435 591 return load;
duke@435 592 }
duke@435 593
duke@435 594 if (RoundFPResults && UseSSE < 2 && load->type()->is_float_kind()) {
duke@435 595 // can't skip load since value might get rounded as a side effect
duke@435 596 return load;
duke@435 597 }
duke@435 598
duke@435 599 ciField* field = load->field();
duke@435 600 Value object = load->obj();
duke@435 601 if (field->holder()->is_loaded() && !field->is_volatile()) {
duke@435 602 int offset = field->offset();
duke@435 603 Value result = NULL;
duke@435 604 int index = _newobjects.find(object);
duke@435 605 if (index != -1) {
duke@435 606 result = _fields.at(index)->at(field);
duke@435 607 } else if (_objects.at_grow(offset, NULL) == object) {
duke@435 608 result = _values.at(field);
duke@435 609 }
duke@435 610 if (result != NULL) {
duke@435 611 #ifndef PRODUCT
duke@435 612 if (PrintIRDuringConstruction && Verbose) {
duke@435 613 tty->print_cr("Eliminated load: ");
duke@435 614 load->print_line();
duke@435 615 }
duke@435 616 #endif
duke@435 617 assert(result->type()->tag() == load->type()->tag(), "wrong types");
duke@435 618 return result;
duke@435 619 }
duke@435 620 }
duke@435 621 return load;
duke@435 622 }
duke@435 623
duke@435 624 // Record this newly allocated object
duke@435 625 void new_instance(NewInstance* object) {
duke@435 626 int index = _newobjects.length();
duke@435 627 _newobjects.append(object);
duke@435 628 if (_fields.at_grow(index, NULL) == NULL) {
duke@435 629 _fields.at_put(index, new FieldBuffer());
duke@435 630 } else {
duke@435 631 _fields.at(index)->kill();
duke@435 632 }
duke@435 633 }
duke@435 634
duke@435 635 void store_value(Value value) {
duke@435 636 int index = _newobjects.find(value);
duke@435 637 if (index != -1) {
duke@435 638 // stored a newly allocated object into another object.
duke@435 639 // Assume we've lost track of it as a separate slice of memory.
duke@435 640 // We could do better by keeping track of whether individual
duke@435 641 // fields could alias each other.
duke@435 642 _newobjects.remove_at(index);
duke@435 643 // pull out the field info and store it at the end of the
duke@435 644 // field info list so it can be reused later.
duke@435 645 _fields.append(_fields.at(index));
duke@435 646 _fields.remove_at(index);
duke@435 647 }
duke@435 648 }
duke@435 649
duke@435 650 void kill() {
duke@435 651 _newobjects.trunc_to(0);
duke@435 652 _objects.trunc_to(0);
duke@435 653 _values.kill();
duke@435 654 }
duke@435 655 };
duke@435 656
duke@435 657
duke@435 658 // Implementation of GraphBuilder's ScopeData
duke@435 659
duke@435 660 GraphBuilder::ScopeData::ScopeData(ScopeData* parent)
duke@435 661 : _parent(parent)
duke@435 662 , _bci2block(NULL)
duke@435 663 , _scope(NULL)
duke@435 664 , _has_handler(false)
duke@435 665 , _stream(NULL)
duke@435 666 , _work_list(NULL)
duke@435 667 , _parsing_jsr(false)
duke@435 668 , _jsr_xhandlers(NULL)
duke@435 669 , _caller_stack_size(-1)
duke@435 670 , _continuation(NULL)
duke@435 671 , _num_returns(0)
duke@435 672 , _cleanup_block(NULL)
duke@435 673 , _cleanup_return_prev(NULL)
duke@435 674 , _cleanup_state(NULL)
duke@435 675 {
duke@435 676 if (parent != NULL) {
duke@435 677 _max_inline_size = (intx) ((float) NestedInliningSizeRatio * (float) parent->max_inline_size() / 100.0f);
duke@435 678 } else {
duke@435 679 _max_inline_size = MaxInlineSize;
duke@435 680 }
duke@435 681 if (_max_inline_size < MaxTrivialSize) {
duke@435 682 _max_inline_size = MaxTrivialSize;
duke@435 683 }
duke@435 684 }
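// Illustrative example (editorial sketch; the concrete numbers depend on the
// flag settings): with MaxInlineSize == 35, NestedInliningSizeRatio == 90
// and MaxTrivialSize == 6, a top-level scope allows inlinees of up to 35
// bytecodes, a nested scope gets 90% of its parent's budget
// ((intx)(0.90f * 35) == 31), and so on, but never below the
// MaxTrivialSize floor of 6.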
duke@435 685
duke@435 686
duke@435 687 void GraphBuilder::kill_all() {
duke@435 688 if (UseLocalValueNumbering) {
duke@435 689 vmap()->kill_all();
duke@435 690 }
duke@435 691 _memory->kill();
duke@435 692 }
duke@435 693
duke@435 694
duke@435 695 BlockBegin* GraphBuilder::ScopeData::block_at(int bci) {
duke@435 696 if (parsing_jsr()) {
duke@435 697 // It is necessary to clone all blocks associated with a
duke@435 698 // subroutine, including those for exception handlers in the scope
duke@435 699 // of the method containing the jsr (because those exception
duke@435 700 // handlers may contain ret instructions in some cases).
duke@435 701 BlockBegin* block = bci2block()->at(bci);
duke@435 702 if (block != NULL && block == parent()->bci2block()->at(bci)) {
duke@435 703 BlockBegin* new_block = new BlockBegin(block->bci());
duke@435 704 #ifndef PRODUCT
duke@435 705 if (PrintInitialBlockList) {
duke@435 706 tty->print_cr("CFG: cloned block %d (bci %d) as block %d for jsr",
duke@435 707 block->block_id(), block->bci(), new_block->block_id());
duke@435 708 }
duke@435 709 #endif
duke@435 710 // copy data from the cloned block
duke@435 711 new_block->set_depth_first_number(block->depth_first_number());
duke@435 712 if (block->is_set(BlockBegin::parser_loop_header_flag)) new_block->set(BlockBegin::parser_loop_header_flag);
duke@435 713 // Preserve certain flags for assertion checking
duke@435 714 if (block->is_set(BlockBegin::subroutine_entry_flag)) new_block->set(BlockBegin::subroutine_entry_flag);
duke@435 715 if (block->is_set(BlockBegin::exception_entry_flag)) new_block->set(BlockBegin::exception_entry_flag);
duke@435 716
duke@435 717 // copy was_visited_flag to allow early detection of bailouts
duke@435 718 // if a block that is used in a jsr has already been visited before,
duke@435 719 // it is shared between the normal control flow and a subroutine
duke@435 720 // BlockBegin::try_merge returns false when the flag is set, this leads
duke@435 721 // to a compilation bailout
duke@435 722 if (block->is_set(BlockBegin::was_visited_flag)) new_block->set(BlockBegin::was_visited_flag);
duke@435 723
duke@435 724 bci2block()->at_put(bci, new_block);
duke@435 725 block = new_block;
duke@435 726 }
duke@435 727 return block;
duke@435 728 } else {
duke@435 729 return bci2block()->at(bci);
duke@435 730 }
duke@435 731 }
duke@435 732
duke@435 733
duke@435 734 XHandlers* GraphBuilder::ScopeData::xhandlers() const {
duke@435 735 if (_jsr_xhandlers == NULL) {
duke@435 736 assert(!parsing_jsr(), "");
duke@435 737 return scope()->xhandlers();
duke@435 738 }
duke@435 739 assert(parsing_jsr(), "");
duke@435 740 return _jsr_xhandlers;
duke@435 741 }
duke@435 742
duke@435 743
duke@435 744 void GraphBuilder::ScopeData::set_scope(IRScope* scope) {
duke@435 745 _scope = scope;
duke@435 746 bool parent_has_handler = false;
duke@435 747 if (parent() != NULL) {
duke@435 748 parent_has_handler = parent()->has_handler();
duke@435 749 }
duke@435 750 _has_handler = parent_has_handler || scope->xhandlers()->has_handlers();
duke@435 751 }
duke@435 752
duke@435 753
duke@435 754 void GraphBuilder::ScopeData::set_inline_cleanup_info(BlockBegin* block,
duke@435 755 Instruction* return_prev,
duke@435 756 ValueStack* return_state) {
duke@435 757 _cleanup_block = block;
duke@435 758 _cleanup_return_prev = return_prev;
duke@435 759 _cleanup_state = return_state;
duke@435 760 }
duke@435 761
duke@435 762
duke@435 763 void GraphBuilder::ScopeData::add_to_work_list(BlockBegin* block) {
duke@435 764 if (_work_list == NULL) {
duke@435 765 _work_list = new BlockList();
duke@435 766 }
duke@435 767
duke@435 768 if (!block->is_set(BlockBegin::is_on_work_list_flag)) {
duke@435 769 // Do not start parsing the continuation block while in a
duke@435 770 // sub-scope
duke@435 771 if (parsing_jsr()) {
duke@435 772 if (block == jsr_continuation()) {
duke@435 773 return;
duke@435 774 }
duke@435 775 } else {
duke@435 776 if (block == continuation()) {
duke@435 777 return;
duke@435 778 }
duke@435 779 }
duke@435 780 block->set(BlockBegin::is_on_work_list_flag);
duke@435 781 _work_list->push(block);
duke@435 782
duke@435 783 sort_top_into_worklist(_work_list, block);
duke@435 784 }
duke@435 785 }
duke@435 786
duke@435 787
duke@435 788 void GraphBuilder::sort_top_into_worklist(BlockList* worklist, BlockBegin* top) {
duke@435 789 assert(worklist->top() == top, "");
duke@435 790 // sort block descending into work list
duke@435 791 const int dfn = top->depth_first_number();
duke@435 792 assert(dfn != -1, "unknown depth first number");
duke@435 793 int i = worklist->length()-2;
duke@435 794 while (i >= 0) {
duke@435 795 BlockBegin* b = worklist->at(i);
duke@435 796 if (b->depth_first_number() < dfn) {
duke@435 797 worklist->at_put(i+1, b);
duke@435 798 } else {
duke@435 799 break;
duke@435 800 }
duke@435 801 i --;
duke@435 802 }
duke@435 803 if (i >= -1) worklist->at_put(i + 1, top);
duke@435 804 }
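// Illustrative example (editorial sketch): if the work list is sorted
// descending by depth-first number, e.g. [9, 5, 2], and a block with dfn 4
// has just been pushed on top, the loop shifts 2 up one slot and stores the
// new block between 5 and 2, giving [9, 5, 4, 2]; pop() therefore always
// returns the remaining block with the smallest depth-first number.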
duke@435 805
duke@435 806
duke@435 807 BlockBegin* GraphBuilder::ScopeData::remove_from_work_list() {
duke@435 808 if (is_work_list_empty()) {
duke@435 809 return NULL;
duke@435 810 }
duke@435 811 return _work_list->pop();
duke@435 812 }
duke@435 813
duke@435 814
duke@435 815 bool GraphBuilder::ScopeData::is_work_list_empty() const {
duke@435 816 return (_work_list == NULL || _work_list->length() == 0);
duke@435 817 }
duke@435 818
duke@435 819
duke@435 820 void GraphBuilder::ScopeData::setup_jsr_xhandlers() {
duke@435 821 assert(parsing_jsr(), "");
duke@435 822 // clone all the exception handlers from the scope
duke@435 823 XHandlers* handlers = new XHandlers(scope()->xhandlers());
duke@435 824 const int n = handlers->length();
duke@435 825 for (int i = 0; i < n; i++) {
duke@435 826 // The XHandlers need to be adjusted to dispatch to the cloned
duke@435 827 // handler block instead of the default one but the synthetic
duke@435 828 // unlocker needs to be handled specially. The synthetic unlocker
duke@435 829 // should be left alone since there can be only one and all code
duke@435 830 // should dispatch to the same one.
duke@435 831 XHandler* h = handlers->handler_at(i);
never@1813 832 assert(h->handler_bci() != SynchronizationEntryBCI, "must be real");
never@1813 833 h->set_entry_block(block_at(h->handler_bci()));
duke@435 834 }
duke@435 835 _jsr_xhandlers = handlers;
duke@435 836 }
duke@435 837
duke@435 838
duke@435 839 int GraphBuilder::ScopeData::num_returns() {
duke@435 840 if (parsing_jsr()) {
duke@435 841 return parent()->num_returns();
duke@435 842 }
duke@435 843 return _num_returns;
duke@435 844 }
duke@435 845
duke@435 846
duke@435 847 void GraphBuilder::ScopeData::incr_num_returns() {
duke@435 848 if (parsing_jsr()) {
duke@435 849 parent()->incr_num_returns();
duke@435 850 } else {
duke@435 851 ++_num_returns;
duke@435 852 }
duke@435 853 }
duke@435 854
duke@435 855
duke@435 856 // Implementation of GraphBuilder
duke@435 857
duke@435 858 #define INLINE_BAILOUT(msg) { inline_bailout(msg); return false; }
duke@435 859
duke@435 860
duke@435 861 void GraphBuilder::load_constant() {
duke@435 862 ciConstant con = stream()->get_constant();
duke@435 863 if (con.basic_type() == T_ILLEGAL) {
duke@435 864 BAILOUT("could not resolve a constant");
duke@435 865 } else {
duke@435 866 ValueType* t = illegalType;
duke@435 867 ValueStack* patch_state = NULL;
duke@435 868 switch (con.basic_type()) {
duke@435 869 case T_BOOLEAN: t = new IntConstant (con.as_boolean()); break;
duke@435 870 case T_BYTE : t = new IntConstant (con.as_byte ()); break;
duke@435 871 case T_CHAR : t = new IntConstant (con.as_char ()); break;
duke@435 872 case T_SHORT : t = new IntConstant (con.as_short ()); break;
duke@435 873 case T_INT : t = new IntConstant (con.as_int ()); break;
duke@435 874 case T_LONG : t = new LongConstant (con.as_long ()); break;
duke@435 875 case T_FLOAT : t = new FloatConstant (con.as_float ()); break;
duke@435 876 case T_DOUBLE : t = new DoubleConstant (con.as_double ()); break;
duke@435 877 case T_ARRAY : t = new ArrayConstant (con.as_object ()->as_array ()); break;
duke@435 878 case T_OBJECT :
duke@435 879 {
duke@435 880 ciObject* obj = con.as_object();
jrose@1957 881 if (!obj->is_loaded()
jrose@1957 882 || (PatchALot && obj->klass() != ciEnv::current()->String_klass())) {
roland@2174 883 patch_state = copy_state_before();
jrose@1957 884 t = new ObjectConstant(obj);
duke@435 885 } else {
jrose@1957 886 assert(!obj->is_klass(), "must be java_mirror of klass");
duke@435 887 t = new InstanceConstant(obj->as_instance());
duke@435 888 }
duke@435 889 break;
duke@435 890 }
duke@435 891 default : ShouldNotReachHere();
duke@435 892 }
duke@435 893 Value x;
duke@435 894 if (patch_state != NULL) {
duke@435 895 x = new Constant(t, patch_state);
duke@435 896 } else {
duke@435 897 x = new Constant(t);
duke@435 898 }
duke@435 899 push(t, append(x));
duke@435 900 }
duke@435 901 }
duke@435 902
duke@435 903
duke@435 904 void GraphBuilder::load_local(ValueType* type, int index) {
roland@2174 905 Value x = state()->local_at(index);
roland@2174 906 assert(x != NULL && !x->type()->is_illegal(), "access of illegal local variable");
duke@435 907 push(type, x);
duke@435 908 }
duke@435 909
duke@435 910
duke@435 911 void GraphBuilder::store_local(ValueType* type, int index) {
duke@435 912 Value x = pop(type);
duke@435 913 store_local(state(), x, type, index);
duke@435 914 }
duke@435 915
duke@435 916
duke@435 917 void GraphBuilder::store_local(ValueStack* state, Value x, ValueType* type, int index) {
duke@435 918 if (parsing_jsr()) {
duke@435 919 // We need to do additional tracking of the location of the return
duke@435 920 // address for jsrs since we don't handle arbitrary jsr/ret
duke@435 921 // constructs. Here we are figuring out in which circumstances we
duke@435 922 // need to bail out.
duke@435 923 if (x->type()->is_address()) {
duke@435 924 scope_data()->set_jsr_return_address_local(index);
duke@435 925
duke@435 926 // Also check parent jsrs (if any) at this time to see whether
duke@435 927 // they are using this local. We don't handle skipping over a
duke@435 928 // ret.
duke@435 929 for (ScopeData* cur_scope_data = scope_data()->parent();
duke@435 930 cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
duke@435 931 cur_scope_data = cur_scope_data->parent()) {
duke@435 932 if (cur_scope_data->jsr_return_address_local() == index) {
duke@435 933 BAILOUT("subroutine overwrites return address from previous subroutine");
duke@435 934 }
duke@435 935 }
duke@435 936 } else if (index == scope_data()->jsr_return_address_local()) {
duke@435 937 scope_data()->set_jsr_return_address_local(-1);
duke@435 938 }
duke@435 939 }
duke@435 940
duke@435 941 state->store_local(index, round_fp(x));
duke@435 942 }
duke@435 943
duke@435 944
duke@435 945 void GraphBuilder::load_indexed(BasicType type) {
roland@2174 946 ValueStack* state_before = copy_state_for_exception();
duke@435 947 Value index = ipop();
duke@435 948 Value array = apop();
duke@435 949 Value length = NULL;
duke@435 950 if (CSEArrayLength ||
duke@435 951 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
duke@435 952 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
roland@2174 953 length = append(new ArrayLength(array, state_before));
duke@435 954 }
roland@2174 955 push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
duke@435 956 }
duke@435 957
duke@435 958
duke@435 959 void GraphBuilder::store_indexed(BasicType type) {
roland@2174 960 ValueStack* state_before = copy_state_for_exception();
duke@435 961 Value value = pop(as_ValueType(type));
duke@435 962 Value index = ipop();
duke@435 963 Value array = apop();
duke@435 964 Value length = NULL;
duke@435 965 if (CSEArrayLength ||
duke@435 966 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
duke@435 967 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
roland@2174 968 length = append(new ArrayLength(array, state_before));
duke@435 969 }
roland@2174 970 StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before);
duke@435 971 append(result);
never@894 972 _memory->store_value(value);
iveresov@2146 973
iveresov@2146 974 if (type == T_OBJECT && is_profiling()) {
iveresov@2146 975 // Note that we'd collect profile data in this method if we wanted it.
iveresov@2146 976 compilation()->set_would_profile(true);
iveresov@2146 977
iveresov@2146 978 if (profile_checkcasts()) {
iveresov@2146 979 result->set_profiled_method(method());
iveresov@2146 980 result->set_profiled_bci(bci());
iveresov@2146 981 result->set_should_profile(true);
iveresov@2146 982 }
iveresov@2146 983 }
duke@435 984 }
duke@435 985
duke@435 986
duke@435 987 void GraphBuilder::stack_op(Bytecodes::Code code) {
duke@435 988 switch (code) {
duke@435 989 case Bytecodes::_pop:
duke@435 990 { state()->raw_pop();
duke@435 991 }
duke@435 992 break;
duke@435 993 case Bytecodes::_pop2:
duke@435 994 { state()->raw_pop();
duke@435 995 state()->raw_pop();
duke@435 996 }
duke@435 997 break;
duke@435 998 case Bytecodes::_dup:
duke@435 999 { Value w = state()->raw_pop();
duke@435 1000 state()->raw_push(w);
duke@435 1001 state()->raw_push(w);
duke@435 1002 }
duke@435 1003 break;
duke@435 1004 case Bytecodes::_dup_x1:
duke@435 1005 { Value w1 = state()->raw_pop();
duke@435 1006 Value w2 = state()->raw_pop();
duke@435 1007 state()->raw_push(w1);
duke@435 1008 state()->raw_push(w2);
duke@435 1009 state()->raw_push(w1);
duke@435 1010 }
duke@435 1011 break;
duke@435 1012 case Bytecodes::_dup_x2:
duke@435 1013 { Value w1 = state()->raw_pop();
duke@435 1014 Value w2 = state()->raw_pop();
duke@435 1015 Value w3 = state()->raw_pop();
duke@435 1016 state()->raw_push(w1);
duke@435 1017 state()->raw_push(w3);
duke@435 1018 state()->raw_push(w2);
duke@435 1019 state()->raw_push(w1);
duke@435 1020 }
duke@435 1021 break;
duke@435 1022 case Bytecodes::_dup2:
duke@435 1023 { Value w1 = state()->raw_pop();
duke@435 1024 Value w2 = state()->raw_pop();
duke@435 1025 state()->raw_push(w2);
duke@435 1026 state()->raw_push(w1);
duke@435 1027 state()->raw_push(w2);
duke@435 1028 state()->raw_push(w1);
duke@435 1029 }
duke@435 1030 break;
duke@435 1031 case Bytecodes::_dup2_x1:
duke@435 1032 { Value w1 = state()->raw_pop();
duke@435 1033 Value w2 = state()->raw_pop();
duke@435 1034 Value w3 = state()->raw_pop();
duke@435 1035 state()->raw_push(w2);
duke@435 1036 state()->raw_push(w1);
duke@435 1037 state()->raw_push(w3);
duke@435 1038 state()->raw_push(w2);
duke@435 1039 state()->raw_push(w1);
duke@435 1040 }
duke@435 1041 break;
duke@435 1042 case Bytecodes::_dup2_x2:
duke@435 1043 { Value w1 = state()->raw_pop();
duke@435 1044 Value w2 = state()->raw_pop();
duke@435 1045 Value w3 = state()->raw_pop();
duke@435 1046 Value w4 = state()->raw_pop();
duke@435 1047 state()->raw_push(w2);
duke@435 1048 state()->raw_push(w1);
duke@435 1049 state()->raw_push(w4);
duke@435 1050 state()->raw_push(w3);
duke@435 1051 state()->raw_push(w2);
duke@435 1052 state()->raw_push(w1);
duke@435 1053 }
duke@435 1054 break;
duke@435 1055 case Bytecodes::_swap:
duke@435 1056 { Value w1 = state()->raw_pop();
duke@435 1057 Value w2 = state()->raw_pop();
duke@435 1058 state()->raw_push(w1);
duke@435 1059 state()->raw_push(w2);
duke@435 1060 }
duke@435 1061 break;
duke@435 1062 default:
duke@435 1063 ShouldNotReachHere();
duke@435 1064 break;
duke@435 1065 }
duke@435 1066 }
duke@435 1067
duke@435 1068
roland@2174 1069 void GraphBuilder::arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* state_before) {
duke@435 1070 Value y = pop(type);
duke@435 1071 Value x = pop(type);
duke@435 1072 // NOTE: strictfp can be queried from current method since we don't
duke@435 1073 // inline methods with differing strictfp bits
roland@2174 1074 Value res = new ArithmeticOp(code, x, y, method()->is_strict(), state_before);
duke@435 1075 // Note: currently single-precision floating-point rounding on Intel is handled at the LIRGenerator level
duke@435 1076 res = append(res);
duke@435 1077 if (method()->is_strict()) {
duke@435 1078 res = round_fp(res);
duke@435 1079 }
duke@435 1080 push(type, res);
duke@435 1081 }
duke@435 1082
duke@435 1083
duke@435 1084 void GraphBuilder::negate_op(ValueType* type) {
duke@435 1085 push(type, append(new NegateOp(pop(type))));
duke@435 1086 }
duke@435 1087
duke@435 1088
duke@435 1089 void GraphBuilder::shift_op(ValueType* type, Bytecodes::Code code) {
duke@435 1090 Value s = ipop();
duke@435 1091 Value x = pop(type);
duke@435 1092 // try to simplify
duke@435 1093 // Note: This code should go into the canonicalizer as soon as it
duke@435 1094 // can handle canonicalized forms that contain more than one node.
duke@435 1095 if (CanonicalizeNodes && code == Bytecodes::_iushr) {
duke@435 1096 // pattern: x >>> s
duke@435 1097 IntConstant* s1 = s->type()->as_IntConstant();
duke@435 1098 if (s1 != NULL) {
duke@435 1099 // pattern: x >>> s1, with s1 constant
duke@435 1100 ShiftOp* l = x->as_ShiftOp();
duke@435 1101 if (l != NULL && l->op() == Bytecodes::_ishl) {
duke@435 1102 // pattern: (a << b) >>> s1
duke@435 1103 IntConstant* s0 = l->y()->type()->as_IntConstant();
duke@435 1104 if (s0 != NULL) {
duke@435 1105 // pattern: (a << s0) >>> s1
duke@435 1106 const int s0c = s0->value() & 0x1F; // only the low 5 bits are significant for shifts
duke@435 1107 const int s1c = s1->value() & 0x1F; // only the low 5 bits are significant for shifts
duke@435 1108 if (s0c == s1c) {
duke@435 1109 if (s0c == 0) {
duke@435 1110 // pattern: (a << 0) >>> 0 => simplify to: a
duke@435 1111 ipush(l->x());
duke@435 1112 } else {
duke@435 1113 // pattern: (a << s0c) >>> s0c => simplify to: a & m, with m constant
duke@435 1114 assert(0 < s0c && s0c < BitsPerInt, "adjust code below to handle corner cases");
duke@435 1115 const int m = (1 << (BitsPerInt - s0c)) - 1;
duke@435 1116 Value s = append(new Constant(new IntConstant(m)));
duke@435 1117 ipush(append(new LogicOp(Bytecodes::_iand, l->x(), s)));
duke@435 1118 }
duke@435 1119 return;
duke@435 1120 }
duke@435 1121 }
duke@435 1122 }
duke@435 1123 }
duke@435 1124 }
duke@435 1125 // could not simplify
duke@435 1126 push(type, append(new ShiftOp(code, x, s)));
duke@435 1127 }
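// Illustrative example (editorial sketch): "(a << 16) >>> 16" on a 32-bit
// int has s0c == s1c == 16, so the shift pair is replaced by "a & 0xFFFF"
// (m = (1 << (32 - 16)) - 1), a single mask operation.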
duke@435 1128
duke@435 1129
duke@435 1130 void GraphBuilder::logic_op(ValueType* type, Bytecodes::Code code) {
duke@435 1131 Value y = pop(type);
duke@435 1132 Value x = pop(type);
duke@435 1133 push(type, append(new LogicOp(code, x, y)));
duke@435 1134 }
duke@435 1135
duke@435 1136
duke@435 1137 void GraphBuilder::compare_op(ValueType* type, Bytecodes::Code code) {
roland@2174 1138 ValueStack* state_before = copy_state_before();
duke@435 1139 Value y = pop(type);
duke@435 1140 Value x = pop(type);
duke@435 1141 ipush(append(new CompareOp(code, x, y, state_before)));
duke@435 1142 }
duke@435 1143
duke@435 1144
duke@435 1145 void GraphBuilder::convert(Bytecodes::Code op, BasicType from, BasicType to) {
duke@435 1146 push(as_ValueType(to), append(new Convert(op, pop(as_ValueType(from)), as_ValueType(to))));
duke@435 1147 }
duke@435 1148
duke@435 1149
duke@435 1150 void GraphBuilder::increment() {
duke@435 1151 int index = stream()->get_index();
duke@435 1152 int delta = stream()->is_wide() ? (signed short)Bytes::get_Java_u2(stream()->cur_bcp() + 4) : (signed char)(stream()->cur_bcp()[2]);
duke@435 1153 load_local(intType, index);
duke@435 1154 ipush(append(new Constant(new IntConstant(delta))));
duke@435 1155 arithmetic_op(intType, Bytecodes::_iadd);
duke@435 1156 store_local(intType, index);
duke@435 1157 }
duke@435 1158
duke@435 1159
duke@435 1160 void GraphBuilder::_goto(int from_bci, int to_bci) {
iveresov@2138 1161 Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
iveresov@2138 1162 if (is_profiling()) {
iveresov@2138 1163 compilation()->set_would_profile(true);
iveresov@2138 1164 }
iveresov@2138 1165 if (profile_branches()) {
iveresov@2138 1166 x->set_profiled_method(method());
iveresov@2138 1167 x->set_profiled_bci(bci());
iveresov@2138 1168 x->set_should_profile(true);
iveresov@2138 1169 }
iveresov@2138 1170 append(x);
duke@435 1171 }
duke@435 1172
duke@435 1173
duke@435 1174 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
duke@435 1175 BlockBegin* tsux = block_at(stream()->get_dest());
duke@435 1176 BlockBegin* fsux = block_at(stream()->next_bci());
duke@435 1177 bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
iveresov@2138 1178 Instruction *i = append(new If(x, cond, false, y, tsux, fsux, is_bb ? state_before : NULL, is_bb));
iveresov@2138 1179
iveresov@2138 1180 if (is_profiling()) {
iveresov@2138 1181 If* if_node = i->as_If();
iveresov@2138 1182 if (if_node != NULL) {
iveresov@2138 1183 // Note that we'd collect profile data in this method if we wanted it.
iveresov@2138 1184 compilation()->set_would_profile(true);
iveresov@2138 1185 // At level 2 we need the proper bci to count backedges
iveresov@2138 1186 if_node->set_profiled_bci(bci());
iveresov@2138 1187 if (profile_branches()) {
iveresov@2138 1188 // Successors can be rotated by the canonicalizer, check for this case.
iveresov@2138 1189 if_node->set_profiled_method(method());
iveresov@2138 1190 if_node->set_should_profile(true);
iveresov@2138 1191 if (if_node->tsux() == fsux) {
iveresov@2138 1192 if_node->set_swapped(true);
iveresov@2138 1193 }
iveresov@2138 1194 }
iveresov@2138 1195 return;
iveresov@2138 1196 }
iveresov@2138 1197
iveresov@2138 1198 // Check if this If was reduced to Goto.
iveresov@2138 1199 Goto *goto_node = i->as_Goto();
iveresov@2138 1200 if (goto_node != NULL) {
iveresov@2138 1201 compilation()->set_would_profile(true);
iveresov@2138 1202 if (profile_branches()) {
iveresov@2138 1203 goto_node->set_profiled_method(method());
iveresov@2138 1204 goto_node->set_profiled_bci(bci());
iveresov@2138 1205 goto_node->set_should_profile(true);
iveresov@2138 1206 // Find out which successor is used.
iveresov@2138 1207 if (goto_node->default_sux() == tsux) {
iveresov@2138 1208 goto_node->set_direction(Goto::taken);
iveresov@2138 1209 } else if (goto_node->default_sux() == fsux) {
iveresov@2138 1210 goto_node->set_direction(Goto::not_taken);
iveresov@2138 1211 } else {
iveresov@2138 1212 ShouldNotReachHere();
iveresov@2138 1213 }
iveresov@2138 1214 }
iveresov@2138 1215 return;
iveresov@2138 1216 }
duke@435 1217 }
duke@435 1218 }
duke@435 1219
duke@435 1220
duke@435 1221 void GraphBuilder::if_zero(ValueType* type, If::Condition cond) {
duke@435 1222 Value y = append(new Constant(intZero));
roland@2174 1223 ValueStack* state_before = copy_state_before();
duke@435 1224 Value x = ipop();
duke@435 1225 if_node(x, cond, y, state_before);
duke@435 1226 }
duke@435 1227
duke@435 1228
duke@435 1229 void GraphBuilder::if_null(ValueType* type, If::Condition cond) {
duke@435 1230 Value y = append(new Constant(objectNull));
roland@2174 1231 ValueStack* state_before = copy_state_before();
duke@435 1232 Value x = apop();
duke@435 1233 if_node(x, cond, y, state_before);
duke@435 1234 }
duke@435 1235
duke@435 1236
duke@435 1237 void GraphBuilder::if_same(ValueType* type, If::Condition cond) {
roland@2174 1238 ValueStack* state_before = copy_state_before();
duke@435 1239 Value y = pop(type);
duke@435 1240 Value x = pop(type);
duke@435 1241 if_node(x, cond, y, state_before);
duke@435 1242 }
duke@435 1243
duke@435 1244
duke@435 1245 void GraphBuilder::jsr(int dest) {
duke@435 1246 // We only handle well-formed jsrs (those which are "block-structured").
duke@435 1247 // If the bytecodes are strange (jumping out of a jsr block) then we
duke@435 1248 // might end up trying to re-parse a block containing a jsr which
duke@435 1249 // has already been activated. Watch for this case and bail out.
duke@435 1250 for (ScopeData* cur_scope_data = scope_data();
duke@435 1251 cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
duke@435 1252 cur_scope_data = cur_scope_data->parent()) {
duke@435 1253 if (cur_scope_data->jsr_entry_bci() == dest) {
duke@435 1254 BAILOUT("too-complicated jsr/ret structure");
duke@435 1255 }
duke@435 1256 }
duke@435 1257
duke@435 1258 push(addressType, append(new Constant(new AddressConstant(next_bci()))));
duke@435 1259 if (!try_inline_jsr(dest)) {
duke@435 1260 return; // bailed out while parsing and inlining subroutine
duke@435 1261 }
duke@435 1262 }
duke@435 1263
duke@435 1264
duke@435 1265 void GraphBuilder::ret(int local_index) {
duke@435 1266 if (!parsing_jsr()) BAILOUT("ret encountered while not parsing subroutine");
duke@435 1267
duke@435 1268 if (local_index != scope_data()->jsr_return_address_local()) {
duke@435 1269 BAILOUT("can not handle complicated jsr/ret constructs");
duke@435 1270 }
duke@435 1271
duke@435 1272 // Rets simply become (NON-SAFEPOINT) gotos to the jsr continuation
duke@435 1273 append(new Goto(scope_data()->jsr_continuation(), false));
duke@435 1274 }
duke@435 1275
duke@435 1276
duke@435 1277 void GraphBuilder::table_switch() {
never@2462 1278 Bytecode_tableswitch sw(stream());
never@2462 1279 const int l = sw.length();
duke@435 1280 if (CanonicalizeNodes && l == 1) {
duke@435 1281 // total of 2 successors => use If instead of switch
duke@435 1282 // Note: This code should go into the canonicalizer as soon as it
duke@435 1283 // can handle canonicalized forms that contain more than one node.
never@2462 1284 Value key = append(new Constant(new IntConstant(sw.low_key())));
never@2462 1285 BlockBegin* tsux = block_at(bci() + sw.dest_offset_at(0));
never@2462 1286 BlockBegin* fsux = block_at(bci() + sw.default_offset());
duke@435 1287 bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
roland@2174 1288 ValueStack* state_before = is_bb ? copy_state_before() : NULL;
duke@435 1289 append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));
duke@435 1290 } else {
duke@435 1291 // collect successors
duke@435 1292 BlockList* sux = new BlockList(l + 1, NULL);
duke@435 1293 int i;
duke@435 1294 bool has_bb = false;
duke@435 1295 for (i = 0; i < l; i++) {
never@2462 1296 sux->at_put(i, block_at(bci() + sw.dest_offset_at(i)));
never@2462 1297 if (sw.dest_offset_at(i) < 0) has_bb = true;
duke@435 1298 }
duke@435 1299 // add default successor
never@2462 1300 sux->at_put(i, block_at(bci() + sw.default_offset()));
roland@2174 1301 ValueStack* state_before = has_bb ? copy_state_before() : NULL;
never@2462 1302 append(new TableSwitch(ipop(), sux, sw.low_key(), state_before, has_bb));
duke@435 1303 }
duke@435 1304 }
duke@435 1305
duke@435 1306
duke@435 1307 void GraphBuilder::lookup_switch() {
never@2462 1308 Bytecode_lookupswitch sw(stream());
never@2462 1309 const int l = sw.number_of_pairs();
duke@435 1310 if (CanonicalizeNodes && l == 1) {
duke@435 1311 // total of 2 successors => use If instead of switch
duke@435 1312       // Note: This code should go into the canonicalizer as soon as it
duke@435 1313       // can handle canonicalized forms that contain more than one node.
duke@435 1314 // simplify to If
never@2462 1315 LookupswitchPair pair = sw.pair_at(0);
never@2462 1316 Value key = append(new Constant(new IntConstant(pair.match())));
never@2462 1317 BlockBegin* tsux = block_at(bci() + pair.offset());
never@2462 1318 BlockBegin* fsux = block_at(bci() + sw.default_offset());
duke@435 1319 bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
roland@2174 1320 ValueStack* state_before = is_bb ? copy_state_before() : NULL;
duke@435 1321 append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));
duke@435 1322 } else {
duke@435 1323 // collect successors & keys
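    // (A lookupswitch carries explicit (match, offset) pairs for sparse
    // key sets, e.g. `case 1:` and `case 1000000:`; each pair becomes a
    // key/successor entry below and the default becomes the final successor.)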
duke@435 1324 BlockList* sux = new BlockList(l + 1, NULL);
duke@435 1325 intArray* keys = new intArray(l, 0);
duke@435 1326 int i;
duke@435 1327 bool has_bb = false;
duke@435 1328 for (i = 0; i < l; i++) {
never@2462 1329 LookupswitchPair pair = sw.pair_at(i);
never@2462 1330 if (pair.offset() < 0) has_bb = true;
never@2462 1331 sux->at_put(i, block_at(bci() + pair.offset()));
never@2462 1332 keys->at_put(i, pair.match());
duke@435 1333 }
duke@435 1334 // add default successor
never@2462 1335 sux->at_put(i, block_at(bci() + sw.default_offset()));
roland@2174 1336 ValueStack* state_before = has_bb ? copy_state_before() : NULL;
duke@435 1337 append(new LookupSwitch(ipop(), sux, keys, state_before, has_bb));
duke@435 1338 }
duke@435 1339 }
duke@435 1340
duke@435 1341 void GraphBuilder::call_register_finalizer() {
duke@435 1342 // If the receiver requires finalization then emit code to perform
duke@435 1343 // the registration on return.
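  // Example (illustrative): for
  //   class A { protected void finalize() { ... } }
  // each new instance must be registered with the runtime when A.<init>
  // returns; the type analysis below tries to prove that the receiver's
  // class has no finalizer (and cannot acquire one) so the registration
  // call can be omitted.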
duke@435 1344
duke@435 1345 // Gather some type information about the receiver
roland@2174 1346 Value receiver = state()->local_at(0);
duke@435 1347 assert(receiver != NULL, "must have a receiver");
duke@435 1348 ciType* declared_type = receiver->declared_type();
duke@435 1349 ciType* exact_type = receiver->exact_type();
duke@435 1350 if (exact_type == NULL &&
duke@435 1351 receiver->as_Local() &&
duke@435 1352 receiver->as_Local()->java_index() == 0) {
duke@435 1353 ciInstanceKlass* ik = compilation()->method()->holder();
duke@435 1354 if (ik->is_final()) {
duke@435 1355 exact_type = ik;
duke@435 1356 } else if (UseCHA && !(ik->has_subklass() || ik->is_interface())) {
duke@435 1357 // test class is leaf class
duke@435 1358 compilation()->dependency_recorder()->assert_leaf_type(ik);
duke@435 1359 exact_type = ik;
duke@435 1360 } else {
duke@435 1361 declared_type = ik;
duke@435 1362 }
duke@435 1363 }
duke@435 1364
duke@435 1365 // see if we know statically that registration isn't required
duke@435 1366 bool needs_check = true;
duke@435 1367 if (exact_type != NULL) {
duke@435 1368 needs_check = exact_type->as_instance_klass()->has_finalizer();
duke@435 1369 } else if (declared_type != NULL) {
duke@435 1370 ciInstanceKlass* ik = declared_type->as_instance_klass();
duke@435 1371 if (!Dependencies::has_finalizable_subclass(ik)) {
duke@435 1372 compilation()->dependency_recorder()->assert_has_no_finalizable_subclasses(ik);
duke@435 1373 needs_check = false;
duke@435 1374 }
duke@435 1375 }
duke@435 1376
duke@435 1377 if (needs_check) {
duke@435 1378 // Perform the registration of finalizable objects.
roland@2174 1379 ValueStack* state_before = copy_state_for_exception();
duke@435 1380 load_local(objectType, 0);
duke@435 1381 append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
duke@435 1382 state()->pop_arguments(1),
roland@2174 1383 true, state_before, true));
duke@435 1384 }
duke@435 1385 }
duke@435 1386
duke@435 1387
duke@435 1388 void GraphBuilder::method_return(Value x) {
duke@435 1389 if (RegisterFinalizersAtInit &&
duke@435 1390 method()->intrinsic_id() == vmIntrinsics::_Object_init) {
duke@435 1391 call_register_finalizer();
duke@435 1392 }
duke@435 1393
duke@435 1394 // Check to see whether we are inlining. If so, Return
duke@435 1395 // instructions become Gotos to the continuation point.
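  // e.g. while parsing an inlined callee, `return v` restores the
  // caller's state, pushes v on the caller's operand stack, and jumps
  // to the block following the call site instead of emitting a Return.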
duke@435 1396 if (continuation() != NULL) {
duke@435 1397 assert(!method()->is_synchronized() || InlineSynchronizedMethods, "can not inline synchronized methods yet");
duke@435 1398
never@2486 1399 if (compilation()->env()->dtrace_method_probes()) {
never@2486 1400 // Report exit from inline methods
never@2486 1401 Values* args = new Values(1);
never@2486 1402 args->push(append(new Constant(new ObjectConstant(method()))));
never@2486 1403 append(new RuntimeCall(voidType, "dtrace_method_exit", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), args));
never@2486 1404 }
never@2486 1405
duke@435 1406 // If the inlined method is synchronized, the monitor must be
duke@435 1407 // released before we jump to the continuation block.
duke@435 1408 if (method()->is_synchronized()) {
roland@2174 1409 assert(state()->locks_size() == 1, "receiver must be locked here");
roland@2174 1410 monitorexit(state()->lock_at(0), SynchronizationEntryBCI);
duke@435 1411 }
duke@435 1412
roland@2174 1413     // State at end of inlined method is the state of the caller
roland@2174 1414     // without the method parameters on the stack, plus the return
roland@2174 1415     // value, if any, of the inlined method on the operand stack.
roland@2174 1416 set_state(state()->caller_state()->copy_for_parsing());
duke@435 1417 if (x != NULL) {
duke@435 1418 state()->push(x->type(), x);
duke@435 1419 }
duke@435 1420 Goto* goto_callee = new Goto(continuation(), false);
duke@435 1421
duke@435 1422 // See whether this is the first return; if so, store off some
duke@435 1423 // of the state for later examination
duke@435 1424 if (num_returns() == 0) {
duke@435 1425 set_inline_cleanup_info(_block, _last, state());
duke@435 1426 }
duke@435 1427
duke@435 1428 // The current bci() is in the wrong scope, so use the bci() of
duke@435 1429 // the continuation point.
duke@435 1430 append_with_bci(goto_callee, scope_data()->continuation()->bci());
duke@435 1431 incr_num_returns();
duke@435 1432
duke@435 1433 return;
duke@435 1434 }
duke@435 1435
duke@435 1436 state()->truncate_stack(0);
duke@435 1437 if (method()->is_synchronized()) {
duke@435 1438 // perform the unlocking before exiting the method
duke@435 1439 Value receiver;
duke@435 1440 if (!method()->is_static()) {
duke@435 1441 receiver = _initial_state->local_at(0);
duke@435 1442 } else {
duke@435 1443 receiver = append(new Constant(new ClassConstant(method()->holder())));
duke@435 1444 }
duke@435 1445 append_split(new MonitorExit(receiver, state()->unlock()));
duke@435 1446 }
duke@435 1447
duke@435 1448 append(new Return(x));
duke@435 1449 }
duke@435 1450
duke@435 1451
duke@435 1452 void GraphBuilder::access_field(Bytecodes::Code code) {
duke@435 1453 bool will_link;
duke@435 1454 ciField* field = stream()->get_field(will_link);
duke@435 1455 ciInstanceKlass* holder = field->holder();
duke@435 1456 BasicType field_type = field->type()->basic_type();
duke@435 1457 ValueType* type = as_ValueType(field_type);
duke@435 1458 // call will_link again to determine if the field is valid.
never@2634 1459 const bool needs_patching = !holder->is_loaded() ||
never@2634 1460 !field->will_link(method()->holder(), code) ||
never@2634 1461 PatchALot;
duke@435 1462
roland@2174 1463 ValueStack* state_before = NULL;
never@2634 1464 if (!holder->is_initialized() || needs_patching) {
duke@435 1465 // save state before instruction for debug info when
duke@435 1466 // deoptimization happens during patching
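    // e.g. a getfield of a field whose holder class is not yet loaded
    // (or any field access under PatchALot) is emitted as patchable
    // code; if deoptimization occurs while patching, execution resumes
    // in the interpreter from this saved state.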
roland@2174 1467 state_before = copy_state_before();
duke@435 1468 }
duke@435 1469
duke@435 1470 Value obj = NULL;
duke@435 1471 if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
roland@2174 1472 if (state_before != NULL) {
duke@435 1473 // build a patching constant
never@2658 1474 obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
duke@435 1475 } else {
never@2658 1476 obj = new Constant(new InstanceConstant(holder->java_mirror()));
duke@435 1477 }
duke@435 1478 }
duke@435 1479
duke@435 1480
never@2634 1481 const int offset = !needs_patching ? field->offset() : -1;
duke@435 1482 switch (code) {
duke@435 1483 case Bytecodes::_getstatic: {
duke@435 1484 // check for compile-time constants, i.e., initialized static final fields
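      // e.g. `static final int N = 42;` in an initialized holder folds
      // the getstatic into the constant 42; object and array constants
      // are folded only if should_be_constant() approves the oop.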
duke@435 1485 Instruction* constant = NULL;
duke@435 1486 if (field->is_constant() && !PatchALot) {
duke@435 1487 ciConstant field_val = field->constant_value();
duke@435 1488 BasicType field_type = field_val.basic_type();
duke@435 1489 switch (field_type) {
duke@435 1490 case T_ARRAY:
duke@435 1491 case T_OBJECT:
jrose@1424 1492 if (field_val.as_object()->should_be_constant()) {
duke@435 1493 constant = new Constant(as_ValueType(field_val));
duke@435 1494 }
duke@435 1495 break;
duke@435 1496
duke@435 1497 default:
duke@435 1498 constant = new Constant(as_ValueType(field_val));
duke@435 1499 }
duke@435 1500 }
duke@435 1501 if (constant != NULL) {
duke@435 1502 push(type, append(constant));
duke@435 1503 } else {
roland@2174 1504 if (state_before == NULL) {
roland@2174 1505 state_before = copy_state_for_exception();
roland@2174 1506 }
duke@435 1507 push(type, append(new LoadField(append(obj), offset, field, true,
never@2634 1508 state_before, needs_patching)));
duke@435 1509 }
duke@435 1510 break;
duke@435 1511 }
duke@435 1512 case Bytecodes::_putstatic:
duke@435 1513 { Value val = pop(type);
roland@2174 1514 if (state_before == NULL) {
roland@2174 1515 state_before = copy_state_for_exception();
roland@2174 1516 }
never@2634 1517 append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
duke@435 1518 }
duke@435 1519 break;
duke@435 1520 case Bytecodes::_getfield :
duke@435 1521 {
roland@2174 1522 if (state_before == NULL) {
roland@2174 1523 state_before = copy_state_for_exception();
roland@2174 1524 }
never@2634 1525 LoadField* load = new LoadField(apop(), offset, field, false, state_before, needs_patching);
never@2634 1526 Value replacement = !needs_patching ? _memory->load(load) : load;
duke@435 1527 if (replacement != load) {
roland@2174 1528         assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
duke@435 1529 push(type, replacement);
duke@435 1530 } else {
duke@435 1531 push(type, append(load));
duke@435 1532 }
duke@435 1533 break;
duke@435 1534 }
duke@435 1535
duke@435 1536 case Bytecodes::_putfield :
duke@435 1537 { Value val = pop(type);
roland@2174 1538 if (state_before == NULL) {
roland@2174 1539 state_before = copy_state_for_exception();
roland@2174 1540 }
never@2634 1541 StoreField* store = new StoreField(apop(), offset, field, val, false, state_before, needs_patching);
never@2634 1542 if (!needs_patching) store = _memory->store(store);
duke@435 1543 if (store != NULL) {
duke@435 1544 append(store);
duke@435 1545 }
duke@435 1546 }
duke@435 1547 break;
duke@435 1548 default :
duke@435 1549 ShouldNotReachHere();
duke@435 1550 break;
duke@435 1551 }
duke@435 1552 }
duke@435 1553
duke@435 1554
duke@435 1555 Dependencies* GraphBuilder::dependency_recorder() const {
duke@435 1556 assert(DeoptC1, "need debug information");
duke@435 1557 return compilation()->dependency_recorder();
duke@435 1558 }
duke@435 1559
duke@435 1560
duke@435 1561 void GraphBuilder::invoke(Bytecodes::Code code) {
duke@435 1562 bool will_link;
duke@435 1563 ciMethod* target = stream()->get_method(will_link);
duke@435 1564 // we have to make sure the argument size (incl. the receiver)
duke@435 1565 // is correct for compilation (the call would fail later during
duke@435 1566 // linkage anyway) - was bug (gri 7/28/99)
duke@435 1567 if (target->is_loaded() && target->is_static() != (code == Bytecodes::_invokestatic)) BAILOUT("will cause link error");
duke@435 1568 ciInstanceKlass* klass = target->holder();
duke@435 1569
duke@435 1570 // check if CHA possible: if so, change the code to invoke_special
duke@435 1571 ciInstanceKlass* calling_klass = method()->holder();
duke@435 1572 ciKlass* holder = stream()->get_declared_method_holder();
duke@435 1573 ciInstanceKlass* callee_holder = ciEnv::get_instance_klass_for_declared_method_holder(holder);
duke@435 1574 ciInstanceKlass* actual_recv = callee_holder;
duke@435 1575
duke@435 1576 // some methods are obviously bindable without any type checks so
duke@435 1577 // convert them directly to an invokespecial.
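  // e.g. an invokevirtual of a final method, or of any method in a
  // final class, has exactly one possible target and therefore needs
  // no dynamic dispatch.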
duke@435 1578 if (target->is_loaded() && !target->is_abstract() &&
duke@435 1579 target->can_be_statically_bound() && code == Bytecodes::_invokevirtual) {
duke@435 1580 code = Bytecodes::_invokespecial;
duke@435 1581 }
duke@435 1582
duke@435 1583 // NEEDS_CLEANUP
duke@435 1584   // I've added the target->is_loaded() test below but I don't really understand
duke@435 1585   // how klass->is_loaded() can be true and yet target->is_loaded() is false.
duke@435 1586   // This happened while running the JCK invokevirtual tests under doit. TKR
duke@435 1587 ciMethod* cha_monomorphic_target = NULL;
duke@435 1588 ciMethod* exact_target = NULL;
twisti@1730 1589 if (UseCHA && DeoptC1 && klass->is_loaded() && target->is_loaded() &&
twisti@1730 1590 !target->is_method_handle_invoke()) {
duke@435 1591 Value receiver = NULL;
duke@435 1592 ciInstanceKlass* receiver_klass = NULL;
duke@435 1593 bool type_is_exact = false;
duke@435 1594 // try to find a precise receiver type
duke@435 1595 if (will_link && !target->is_static()) {
duke@435 1596 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
duke@435 1597 receiver = state()->stack_at(index);
duke@435 1598 ciType* type = receiver->exact_type();
duke@435 1599 if (type != NULL && type->is_loaded() &&
duke@435 1600 type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
duke@435 1601 receiver_klass = (ciInstanceKlass*) type;
duke@435 1602 type_is_exact = true;
duke@435 1603 }
duke@435 1604 if (type == NULL) {
duke@435 1605 type = receiver->declared_type();
duke@435 1606 if (type != NULL && type->is_loaded() &&
duke@435 1607 type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
duke@435 1608 receiver_klass = (ciInstanceKlass*) type;
duke@435 1609 if (receiver_klass->is_leaf_type() && !receiver_klass->is_final()) {
duke@435 1610 // Insert a dependency on this type since
duke@435 1611 // find_monomorphic_target may assume it's already done.
duke@435 1612 dependency_recorder()->assert_leaf_type(receiver_klass);
duke@435 1613 type_is_exact = true;
duke@435 1614 }
duke@435 1615 }
duke@435 1616 }
duke@435 1617 }
duke@435 1618 if (receiver_klass != NULL && type_is_exact &&
duke@435 1619 receiver_klass->is_loaded() && code != Bytecodes::_invokespecial) {
duke@435 1620 // If we have the exact receiver type we can bind directly to
duke@435 1621 // the method to call.
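      // e.g. a receiver known to come from `new C()` has exact type C,
      // so resolve_invoke() can select C's implementation at compile time.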
duke@435 1622 exact_target = target->resolve_invoke(calling_klass, receiver_klass);
duke@435 1623 if (exact_target != NULL) {
duke@435 1624 target = exact_target;
duke@435 1625 code = Bytecodes::_invokespecial;
duke@435 1626 }
duke@435 1627 }
duke@435 1628 if (receiver_klass != NULL &&
duke@435 1629 receiver_klass->is_subtype_of(actual_recv) &&
duke@435 1630 actual_recv->is_initialized()) {
duke@435 1631 actual_recv = receiver_klass;
duke@435 1632 }
duke@435 1633
duke@435 1634 if ((code == Bytecodes::_invokevirtual && callee_holder->is_initialized()) ||
duke@435 1635 (code == Bytecodes::_invokeinterface && callee_holder->is_initialized() && !actual_recv->is_interface())) {
duke@435 1636 // Use CHA on the receiver to select a more precise method.
duke@435 1637 cha_monomorphic_target = target->find_monomorphic_target(calling_klass, callee_holder, actual_recv);
duke@435 1638 } else if (code == Bytecodes::_invokeinterface && callee_holder->is_loaded() && receiver != NULL) {
duke@435 1639 // if there is only one implementor of this interface then we
duke@435 1640       // may be able to bind this invoke directly to the implementing
duke@435 1641       // klass but we need both a dependence on the single interface
duke@435 1642       // and on the method we bind to. Additionally, since all we know
duke@435 1643       // about the receiver type is that it's supposed to implement the
duke@435 1644 // interface we have to insert a check that it's the class we
duke@435 1645 // expect. Interface types are not checked by the verifier so
duke@435 1646 // they are roughly equivalent to Object.
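      // Example (illustrative): given interface I { void f(); } with a
      // single loaded implementor C, an invokeinterface of i.f() can be
      // bound to C.f(), guarded by the CheckCast emitted below and the
      // CHA dependency recorded on the chosen method.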
duke@435 1647 ciInstanceKlass* singleton = NULL;
duke@435 1648 if (target->holder()->nof_implementors() == 1) {
duke@435 1649 singleton = target->holder()->implementor(0);
duke@435 1650 }
duke@435 1651 if (singleton) {
duke@435 1652 cha_monomorphic_target = target->find_monomorphic_target(calling_klass, target->holder(), singleton);
duke@435 1653 if (cha_monomorphic_target != NULL) {
duke@435 1654 // If CHA is able to bind this invoke then update the class
duke@435 1655 // to match that class, otherwise klass will refer to the
duke@435 1656 // interface.
duke@435 1657 klass = cha_monomorphic_target->holder();
duke@435 1658 actual_recv = target->holder();
duke@435 1659
duke@435 1660         // insert a check that it's really the expected class.
roland@2174 1661 CheckCast* c = new CheckCast(klass, receiver, copy_state_for_exception());
duke@435 1662 c->set_incompatible_class_change_check();
duke@435 1663 c->set_direct_compare(klass->is_final());
duke@435 1664 append_split(c);
duke@435 1665 }
duke@435 1666 }
duke@435 1667 }
duke@435 1668 }
duke@435 1669
duke@435 1670 if (cha_monomorphic_target != NULL) {
duke@435 1671 if (cha_monomorphic_target->is_abstract()) {
duke@435 1672 // Do not optimize for abstract methods
duke@435 1673 cha_monomorphic_target = NULL;
duke@435 1674 }
duke@435 1675 }
duke@435 1676
duke@435 1677 if (cha_monomorphic_target != NULL) {
duke@435 1678 if (!(target->is_final_method())) {
duke@435 1679 // If we inlined because CHA revealed only a single target method,
duke@435 1680 // then we are dependent on that target method not getting overridden
duke@435 1681 // by dynamic class loading. Be sure to test the "static" receiver
duke@435 1682 // dest_method here, as opposed to the actual receiver, which may
duke@435 1683 // falsely lead us to believe that the receiver is final or private.
duke@435 1684 dependency_recorder()->assert_unique_concrete_method(actual_recv, cha_monomorphic_target);
duke@435 1685 }
duke@435 1686 code = Bytecodes::_invokespecial;
duke@435 1687 }
duke@435 1688 // check if we could do inlining
duke@435 1689 if (!PatchALot && Inline && klass->is_loaded() &&
duke@435 1690 (klass->is_initialized() || klass->is_interface() && target->holder()->is_initialized())
duke@435 1691 && target->will_link(klass, callee_holder, code)) {
duke@435 1692 // callee is known => check if we have static binding
duke@435 1693 assert(target->is_loaded(), "callee must be known");
duke@435 1694 if (code == Bytecodes::_invokestatic
duke@435 1695 || code == Bytecodes::_invokespecial
duke@435 1696 || code == Bytecodes::_invokevirtual && target->is_final_method()
duke@435 1697 ) {
duke@435 1698 // static binding => check if callee is ok
duke@435 1699 ciMethod* inline_target = (cha_monomorphic_target != NULL)
duke@435 1700 ? cha_monomorphic_target
duke@435 1701 : target;
duke@435 1702 bool res = try_inline(inline_target, (cha_monomorphic_target != NULL) || (exact_target != NULL));
duke@435 1703 CHECK_BAILOUT();
duke@435 1704
duke@435 1705 #ifndef PRODUCT
duke@435 1706 // printing
duke@435 1707 if (PrintInlining && !res) {
duke@435 1708 // if it was successfully inlined, then it was already printed.
duke@435 1709 print_inline_result(inline_target, res);
duke@435 1710 }
duke@435 1711 #endif
duke@435 1712 clear_inline_bailout();
duke@435 1713 if (res) {
duke@435 1714 // Register dependence if JVMTI has either breakpoint
duke@435 1715 // setting or hotswapping of methods capabilities since they may
duke@435 1716 // cause deoptimization.
kvn@1215 1717 if (compilation()->env()->jvmti_can_hotswap_or_post_breakpoint()) {
duke@435 1718 dependency_recorder()->assert_evol_method(inline_target);
duke@435 1719 }
duke@435 1720 return;
duke@435 1721 }
duke@435 1722 }
duke@435 1723 }
duke@435 1724 // If we attempted an inline which did not succeed because of a
duke@435 1725 // bailout during construction of the callee graph, the entire
duke@435 1726 // compilation has to be aborted. This is fairly rare and currently
duke@435 1727 // seems to only occur for jasm-generated classes which contain
duke@435 1728 // jsr/ret pairs which are not associated with finally clauses and
duke@435 1729 // do not have exception handlers in the containing method, and are
duke@435 1730 // therefore not caught early enough to abort the inlining without
duke@435 1731 // corrupting the graph. (We currently bail out with a non-empty
duke@435 1732 // stack at a ret in these situations.)
duke@435 1733 CHECK_BAILOUT();
duke@435 1734
duke@435 1735 // inlining not successful => standard invoke
twisti@1730 1736 bool is_loaded = target->is_loaded();
twisti@1730 1737 bool has_receiver =
twisti@1730 1738 code == Bytecodes::_invokespecial ||
twisti@1730 1739 code == Bytecodes::_invokevirtual ||
twisti@1730 1740 code == Bytecodes::_invokeinterface;
twisti@1730 1741 bool is_invokedynamic = code == Bytecodes::_invokedynamic;
duke@435 1742 ValueType* result_type = as_ValueType(target->return_type());
twisti@1730 1743
twisti@1730 1744 // We require the debug info to be the "state before" because
twisti@1730 1745 // invokedynamics may deoptimize.
roland@2174 1746 ValueStack* state_before = is_invokedynamic ? copy_state_before() : copy_state_exhandling();
twisti@1730 1747
duke@435 1748 Values* args = state()->pop_arguments(target->arg_size_no_receiver());
twisti@1730 1749 Value recv = has_receiver ? apop() : NULL;
duke@435 1750 int vtable_index = methodOopDesc::invalid_vtable_index;
duke@435 1751
duke@435 1752 #ifdef SPARC
duke@435 1753 // Currently only supported on Sparc.
duke@435 1754   // The UseInlineCaches flag only controls dispatch to invokevirtuals for
duke@435 1755 // loaded classes which we weren't able to statically bind.
duke@435 1756 if (!UseInlineCaches && is_loaded && code == Bytecodes::_invokevirtual
duke@435 1757 && !target->can_be_statically_bound()) {
duke@435 1758 // Find a vtable index if one is available
duke@435 1759 vtable_index = target->resolve_vtable_index(calling_klass, callee_holder);
duke@435 1760 }
duke@435 1761 #endif
duke@435 1762
duke@435 1763 if (recv != NULL &&
duke@435 1764 (code == Bytecodes::_invokespecial ||
iveresov@2138 1765 !is_loaded || target->is_final())) {
duke@435 1766 // invokespecial always needs a NULL check. invokevirtual where
duke@435 1767     // the target is final or where it's not known whether the
duke@435 1768 // target is final requires a NULL check. Otherwise normal
duke@435 1769 // invokevirtual will perform the null check during the lookup
duke@435 1770 // logic or the unverified entry point. Profiling of calls
duke@435 1771 // requires that the null check is performed in all cases.
duke@435 1772 null_check(recv);
duke@435 1773 }
duke@435 1774
iveresov@2138 1775 if (is_profiling()) {
iveresov@2138 1776 if (recv != NULL && profile_calls()) {
iveresov@2138 1777 null_check(recv);
duke@435 1778 }
iveresov@2138 1779 // Note that we'd collect profile data in this method if we wanted it.
iveresov@2138 1780 compilation()->set_would_profile(true);
iveresov@2138 1781
iveresov@2138 1782 if (profile_calls()) {
iveresov@2138 1783 assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
iveresov@2138 1784 ciKlass* target_klass = NULL;
iveresov@2138 1785 if (cha_monomorphic_target != NULL) {
iveresov@2138 1786 target_klass = cha_monomorphic_target->holder();
iveresov@2138 1787 } else if (exact_target != NULL) {
iveresov@2138 1788 target_klass = exact_target->holder();
iveresov@2138 1789 }
iveresov@2138 1790 profile_call(recv, target_klass);
iveresov@2138 1791 }
duke@435 1792 }
duke@435 1793
twisti@1730 1794 Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target, state_before);
duke@435 1795 // push result
duke@435 1796 append_split(result);
duke@435 1797
duke@435 1798 if (result_type != voidType) {
duke@435 1799 if (method()->is_strict()) {
duke@435 1800 push(result_type, round_fp(result));
duke@435 1801 } else {
duke@435 1802 push(result_type, result);
duke@435 1803 }
duke@435 1804 }
duke@435 1805 }
duke@435 1806
duke@435 1807
duke@435 1808 void GraphBuilder::new_instance(int klass_index) {
roland@2174 1809 ValueStack* state_before = copy_state_exhandling();
duke@435 1810 bool will_link;
duke@435 1811 ciKlass* klass = stream()->get_klass(will_link);
duke@435 1812 assert(klass->is_instance_klass(), "must be an instance klass");
roland@2174 1813 NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before);
duke@435 1814 _memory->new_instance(new_instance);
duke@435 1815 apush(append_split(new_instance));
duke@435 1816 }
duke@435 1817
duke@435 1818
duke@435 1819 void GraphBuilder::new_type_array() {
roland@2174 1820 ValueStack* state_before = copy_state_exhandling();
roland@2174 1821 apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
duke@435 1822 }
duke@435 1823
duke@435 1824
duke@435 1825 void GraphBuilder::new_object_array() {
duke@435 1826 bool will_link;
duke@435 1827 ciKlass* klass = stream()->get_klass(will_link);
roland@2174 1828 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
duke@435 1829 NewArray* n = new NewObjectArray(klass, ipop(), state_before);
duke@435 1830 apush(append_split(n));
duke@435 1831 }
duke@435 1832
duke@435 1833
duke@435 1834 bool GraphBuilder::direct_compare(ciKlass* k) {
duke@435 1835 if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
duke@435 1836 ciInstanceKlass* ik = k->as_instance_klass();
duke@435 1837 if (ik->is_final()) {
duke@435 1838 return true;
duke@435 1839 } else {
duke@435 1840 if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
duke@435 1841 // test class is leaf class
duke@435 1842 dependency_recorder()->assert_leaf_type(ik);
duke@435 1843 return true;
duke@435 1844 }
duke@435 1845 }
duke@435 1846 }
duke@435 1847 return false;
duke@435 1848 }
duke@435 1849
duke@435 1850
duke@435 1851 void GraphBuilder::check_cast(int klass_index) {
duke@435 1852 bool will_link;
duke@435 1853 ciKlass* klass = stream()->get_klass(will_link);
roland@2174 1854 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
duke@435 1855 CheckCast* c = new CheckCast(klass, apop(), state_before);
duke@435 1856 apush(append_split(c));
duke@435 1857 c->set_direct_compare(direct_compare(klass));
iveresov@2138 1858
iveresov@2138 1859 if (is_profiling()) {
iveresov@2138 1860 // Note that we'd collect profile data in this method if we wanted it.
iveresov@2138 1861 compilation()->set_would_profile(true);
iveresov@2138 1862
iveresov@2138 1863 if (profile_checkcasts()) {
iveresov@2138 1864 c->set_profiled_method(method());
iveresov@2138 1865 c->set_profiled_bci(bci());
iveresov@2138 1866 c->set_should_profile(true);
iveresov@2138 1867 }
duke@435 1868 }
duke@435 1869 }
duke@435 1870
duke@435 1871
duke@435 1872 void GraphBuilder::instance_of(int klass_index) {
duke@435 1873 bool will_link;
duke@435 1874 ciKlass* klass = stream()->get_klass(will_link);
roland@2174 1875 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
duke@435 1876 InstanceOf* i = new InstanceOf(klass, apop(), state_before);
duke@435 1877 ipush(append_split(i));
duke@435 1878 i->set_direct_compare(direct_compare(klass));
iveresov@2146 1879
iveresov@2146 1880 if (is_profiling()) {
iveresov@2146 1881 // Note that we'd collect profile data in this method if we wanted it.
iveresov@2146 1882 compilation()->set_would_profile(true);
iveresov@2146 1883
iveresov@2146 1884 if (profile_checkcasts()) {
iveresov@2146 1885 i->set_profiled_method(method());
iveresov@2146 1886 i->set_profiled_bci(bci());
iveresov@2146 1887 i->set_should_profile(true);
iveresov@2146 1888 }
iveresov@2146 1889 }
duke@435 1890 }
duke@435 1891
duke@435 1892
duke@435 1893 void GraphBuilder::monitorenter(Value x, int bci) {
duke@435 1894 // save state before locking in case of deoptimization after a NullPointerException
roland@2174 1895 ValueStack* state_before = copy_state_for_exception_with_bci(bci);
roland@2174 1896 append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
duke@435 1897 kill_all();
duke@435 1898 }
duke@435 1899
duke@435 1900
duke@435 1901 void GraphBuilder::monitorexit(Value x, int bci) {
duke@435 1902 append_with_bci(new MonitorExit(x, state()->unlock()), bci);
duke@435 1903 kill_all();
duke@435 1904 }
duke@435 1905
duke@435 1906
duke@435 1907 void GraphBuilder::new_multi_array(int dimensions) {
duke@435 1908 bool will_link;
duke@435 1909 ciKlass* klass = stream()->get_klass(will_link);
roland@2174 1910 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
duke@435 1911
duke@435 1912 Values* dims = new Values(dimensions, NULL);
duke@435 1913 // fill in all dimensions
duke@435 1914 int i = dimensions;
duke@435 1915 while (i-- > 0) dims->at_put(i, ipop());
duke@435 1916 // create array
duke@435 1917 NewArray* n = new NewMultiArray(klass, dims, state_before);
duke@435 1918 apush(append_split(n));
duke@435 1919 }
duke@435 1920
duke@435 1921
duke@435 1922 void GraphBuilder::throw_op(int bci) {
duke@435 1923 // We require that the debug info for a Throw be the "state before"
duke@435 1924 // the Throw (i.e., exception oop is still on TOS)
roland@2174 1925 ValueStack* state_before = copy_state_before_with_bci(bci);
duke@435 1926 Throw* t = new Throw(apop(), state_before);
roland@2174 1927 // operand stack not needed after a throw
roland@2174 1928 state()->truncate_stack(0);
duke@435 1929 append_with_bci(t, bci);
duke@435 1930 }
duke@435 1931
duke@435 1932
duke@435 1933 Value GraphBuilder::round_fp(Value fp_value) {
duke@435 1934 // no rounding needed if SSE2 is used
duke@435 1935 if (RoundFPResults && UseSSE < 2) {
duke@435 1936 // Must currently insert rounding node for doubleword values that
duke@435 1937 // are results of expressions (i.e., not loads from memory or
duke@435 1938 // constants)
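    // e.g. without SSE2 the x87 FPU computes `a * b` at 80-bit extended
    // precision; a strict method must round the product to a 64-bit
    // double before its value can be observed.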
duke@435 1939 if (fp_value->type()->tag() == doubleTag &&
duke@435 1940 fp_value->as_Constant() == NULL &&
duke@435 1941 fp_value->as_Local() == NULL && // method parameters need no rounding
duke@435 1942 fp_value->as_RoundFP() == NULL) {
duke@435 1943 return append(new RoundFP(fp_value));
duke@435 1944 }
duke@435 1945 }
duke@435 1946 return fp_value;
duke@435 1947 }
duke@435 1948
duke@435 1949
duke@435 1950 Instruction* GraphBuilder::append_with_bci(Instruction* instr, int bci) {
iveresov@2138 1951 Canonicalizer canon(compilation(), instr, bci);
duke@435 1952 Instruction* i1 = canon.canonical();
roland@2174 1953 if (i1->is_linked() || !i1->can_be_linked()) {
duke@435 1954 // Canonicalizer returned an instruction which was already
duke@435 1955 // appended so simply return it.
duke@435 1956 return i1;
roland@2174 1957 }
roland@2174 1958
roland@2174 1959 if (UseLocalValueNumbering) {
duke@435 1960 // Lookup the instruction in the ValueMap and add it to the map if
duke@435 1961 // it's not found.
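    // e.g. two occurrences of `a + b` with no intervening kill of the
    // operands hash to the same ValueMap entry, so the second append
    // simply reuses the already-linked first instruction.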
duke@435 1962 Instruction* i2 = vmap()->find_insert(i1);
duke@435 1963 if (i2 != i1) {
duke@435 1964 // found an entry in the value map, so just return it.
roland@2174 1965 assert(i2->is_linked(), "should already be linked");
duke@435 1966 return i2;
duke@435 1967 }
never@894 1968 ValueNumberingEffects vne(vmap());
never@894 1969 i1->visit(&vne);
duke@435 1970 }
duke@435 1971
roland@2174 1972 // i1 was not eliminated => append it
roland@2174 1973 assert(i1->next() == NULL, "shouldn't already be linked");
roland@2174 1974 _last = _last->set_next(i1, canon.bci());
roland@2174 1975
roland@2174 1976 if (++_instruction_count >= InstructionCountCutoff && !bailed_out()) {
roland@2174 1977 // set the bailout state but complete normal processing. We
roland@2174 1978 // might do a little more work before noticing the bailout so we
roland@2174 1979 // want processing to continue normally until it's noticed.
roland@2174 1980 bailout("Method and/or inlining is too large");
roland@2174 1981 }
roland@2174 1982
roland@2174 1983 #ifndef PRODUCT
roland@2174 1984 if (PrintIRDuringConstruction) {
roland@2174 1985 InstructionPrinter ip;
roland@2174 1986 ip.print_line(i1);
roland@2174 1987 if (Verbose) {
roland@2174 1988 state()->print();
duke@435 1989 }
roland@2174 1990 }
roland@2174 1991 #endif
roland@2174 1992
roland@2174 1993 // save state after modification of operand stack for StateSplit instructions
roland@2174 1994 StateSplit* s = i1->as_StateSplit();
roland@2174 1995 if (s != NULL) {
roland@2174 1996 if (EliminateFieldAccess) {
roland@2174 1997 Intrinsic* intrinsic = s->as_Intrinsic();
roland@2174 1998 if (s->as_Invoke() != NULL || (intrinsic && !intrinsic->preserves_state())) {
roland@2174 1999 _memory->kill();
duke@435 2000 }
duke@435 2001 }
roland@2174 2002 s->set_state(state()->copy(ValueStack::StateAfter, canon.bci()));
roland@2174 2003 }
roland@2174 2004
roland@2174 2005 // set up exception handlers for this instruction if necessary
roland@2174 2006 if (i1->can_trap()) {
roland@2174 2007 i1->set_exception_handlers(handle_exception(i1));
roland@2174 2008 assert(i1->exception_state() != NULL || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
duke@435 2009 }
duke@435 2010 return i1;
duke@435 2011 }
duke@435 2012
duke@435 2013
duke@435 2014 Instruction* GraphBuilder::append(Instruction* instr) {
duke@435 2015 assert(instr->as_StateSplit() == NULL || instr->as_BlockEnd() != NULL, "wrong append used");
duke@435 2016 return append_with_bci(instr, bci());
duke@435 2017 }
duke@435 2018
duke@435 2019
duke@435 2020 Instruction* GraphBuilder::append_split(StateSplit* instr) {
duke@435 2021 return append_with_bci(instr, bci());
duke@435 2022 }
duke@435 2023
duke@435 2024
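// A just-allocated object (NewInstance/NewArray) or a loaded non-null
// object constant can never be null, so the explicit NullCheck is
// elided for such values.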
duke@435 2025 void GraphBuilder::null_check(Value value) {
duke@435 2026 if (value->as_NewArray() != NULL || value->as_NewInstance() != NULL) {
duke@435 2027 return;
duke@435 2028 } else {
duke@435 2029 Constant* con = value->as_Constant();
duke@435 2030 if (con) {
duke@435 2031 ObjectType* c = con->type()->as_ObjectType();
duke@435 2032 if (c && c->is_loaded()) {
duke@435 2033 ObjectConstant* oc = c->as_ObjectConstant();
duke@435 2034 if (!oc || !oc->value()->is_null_object()) {
duke@435 2035 return;
duke@435 2036 }
duke@435 2037 }
duke@435 2038 }
duke@435 2039 }
roland@2174 2040 append(new NullCheck(value, copy_state_for_exception()));
duke@435 2041 }
duke@435 2042
duke@435 2043
duke@435 2044
roland@2174 2045 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
roland@2174 2046 if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != NULL)) {
roland@2174 2047 assert(instruction->exception_state() == NULL
roland@2174 2048 || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
roland@2174 2049 || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->jvmti_can_access_local_variables()),
roland@2174 2050 "exception_state should be of exception kind");
duke@435 2051 return new XHandlers();
duke@435 2052 }
duke@435 2053
duke@435 2054 XHandlers* exception_handlers = new XHandlers();
duke@435 2055 ScopeData* cur_scope_data = scope_data();
roland@2174 2056 ValueStack* cur_state = instruction->state_before();
roland@2174 2057 ValueStack* prev_state = NULL;
duke@435 2058 int scope_count = 0;
duke@435 2059
roland@2174 2060 assert(cur_state != NULL, "state_before must be set");
duke@435 2061 do {
roland@2174 2062 int cur_bci = cur_state->bci();
roland@2174 2063 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
duke@435 2064 assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
duke@435 2065
duke@435 2066 // join with all potential exception handlers
duke@435 2067 XHandlers* list = cur_scope_data->xhandlers();
duke@435 2068 const int n = list->length();
duke@435 2069 for (int i = 0; i < n; i++) {
duke@435 2070 XHandler* h = list->handler_at(i);
duke@435 2071 if (h->covers(cur_bci)) {
duke@435 2072 // h is a potential exception handler => join it
duke@435 2073 compilation()->set_has_exception_handlers(true);
duke@435 2074
duke@435 2075 BlockBegin* entry = h->entry_block();
duke@435 2076 if (entry == block()) {
duke@435 2077 // It's acceptable for an exception handler to cover itself
duke@435 2078 // but we don't handle that in the parser currently. It's
duke@435 2079           // very rare so we bail out instead of trying to handle it.
duke@435 2080 BAILOUT_("exception handler covers itself", exception_handlers);
duke@435 2081 }
duke@435 2082 assert(entry->bci() == h->handler_bci(), "must match");
duke@435 2083 assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
duke@435 2084
duke@435 2085 // previously this was a BAILOUT, but this is not necessary
duke@435 2086 // now because asynchronous exceptions are not handled this way.
roland@2174 2087 assert(entry->state() == NULL || cur_state->total_locks_size() == entry->state()->total_locks_size(), "locks do not match");
duke@435 2088
duke@435 2089           // xhandlers start with an empty expression stack
roland@2174 2090 if (cur_state->stack_size() != 0) {
roland@2174 2091 cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
roland@2174 2092 }
roland@2174 2093 if (instruction->exception_state() == NULL) {
roland@2174 2094 instruction->set_exception_state(cur_state);
roland@2174 2095 }
duke@435 2096
duke@435 2097 // Note: Usually this join must work. However, very
duke@435 2098 // complicated jsr-ret structures where we don't ret from
duke@435 2099 // the subroutine can cause the objects on the monitor
duke@435 2100 // stacks to not match because blocks can be parsed twice.
duke@435 2101 // The only test case we've seen so far which exhibits this
duke@435 2102 // problem is caught by the infinite recursion test in
duke@435 2103 // GraphBuilder::jsr() if the join doesn't work.
roland@2174 2104 if (!entry->try_merge(cur_state)) {
duke@435 2105 BAILOUT_("error while joining with exception handler, prob. due to complicated jsr/rets", exception_handlers);
duke@435 2106 }
duke@435 2107
duke@435 2108 // add current state for correct handling of phi functions at begin of xhandler
roland@2174 2109 int phi_operand = entry->add_exception_state(cur_state);
duke@435 2110
duke@435 2111 // add entry to the list of xhandlers of this block
duke@435 2112 _block->add_exception_handler(entry);
duke@435 2113
duke@435 2114 // add back-edge from xhandler entry to this block
duke@435 2115 if (!entry->is_predecessor(_block)) {
duke@435 2116 entry->add_predecessor(_block);
duke@435 2117 }
duke@435 2118
duke@435 2119           // clone XHandler because phi_operand and scope_count cannot be shared
duke@435 2120 XHandler* new_xhandler = new XHandler(h);
duke@435 2121 new_xhandler->set_phi_operand(phi_operand);
duke@435 2122 new_xhandler->set_scope_count(scope_count);
duke@435 2123 exception_handlers->append(new_xhandler);
duke@435 2124
duke@435 2125 // fill in exception handler subgraph lazily
duke@435 2126 assert(!entry->is_set(BlockBegin::was_visited_flag), "entry must not be visited yet");
duke@435 2127 cur_scope_data->add_to_work_list(entry);
duke@435 2128
duke@435 2129 // stop when reaching catchall
duke@435 2130 if (h->catch_type() == 0) {
duke@435 2131 return exception_handlers;
duke@435 2132 }
duke@435 2133 }
duke@435 2134 }
duke@435 2135
roland@2174 2136 if (exception_handlers->length() == 0) {
roland@2174 2137 // This scope and all callees do not handle exceptions, so the local
roland@2174 2138 // variables of this scope are not needed. However, the scope itself is
roland@2174 2139 // required for a correct exception stack trace -> clear out the locals.
roland@2174 2140 if (_compilation->env()->jvmti_can_access_local_variables()) {
roland@2174 2141 cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
roland@2174 2142 } else {
roland@2174 2143 cur_state = cur_state->copy(ValueStack::EmptyExceptionState, cur_state->bci());
roland@2174 2144 }
roland@2174 2145 if (prev_state != NULL) {
roland@2174 2146 prev_state->set_caller_state(cur_state);
roland@2174 2147 }
roland@2174 2148 if (instruction->exception_state() == NULL) {
roland@2174 2149 instruction->set_exception_state(cur_state);
roland@2174 2150 }
roland@2174 2151 }
roland@2174 2152
duke@435 2153 // Set up iteration for next time.
duke@435 2154 // If parsing a jsr, do not grab exception handlers from the
duke@435 2155 // parent scopes for this method (already got them, and they
duke@435 2156 // needed to be cloned)
roland@2174 2157
roland@2174 2158 while (cur_scope_data->parsing_jsr()) {
roland@2174 2159 cur_scope_data = cur_scope_data->parent();
duke@435 2160 }
roland@2174 2161
roland@2174 2162 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
roland@2174 2163 assert(cur_state->locks_size() == 0 || cur_state->locks_size() == 1, "unlocking must be done in a catchall exception handler");
roland@2174 2164
roland@2174 2165 prev_state = cur_state;
roland@2174 2166 cur_state = cur_state->caller_state();
roland@2174 2167 cur_scope_data = cur_scope_data->parent();
roland@2174 2168 scope_count++;
duke@435 2169 } while (cur_scope_data != NULL);
duke@435 2170
duke@435 2171 return exception_handlers;
duke@435 2172 }
duke@435 2173
duke@435 2174
duke@435 2175 // Helper class for simplifying Phis.
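// Example: a variable that is never reassigned inside a loop produces
// x2 = [x1, x2] at the loop header; the operand scan in simplify()
// substitutes x1 for x2, and x = [y, y] collapses to y the same way.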
duke@435 2176 class PhiSimplifier : public BlockClosure {
duke@435 2177 private:
duke@435 2178 bool _has_substitutions;
duke@435 2179 Value simplify(Value v);
duke@435 2180
duke@435 2181 public:
duke@435 2182 PhiSimplifier(BlockBegin* start) : _has_substitutions(false) {
duke@435 2183 start->iterate_preorder(this);
duke@435 2184 if (_has_substitutions) {
duke@435 2185 SubstitutionResolver sr(start);
duke@435 2186 }
duke@435 2187 }
duke@435 2188 void block_do(BlockBegin* b);
duke@435 2189 bool has_substitutions() const { return _has_substitutions; }
duke@435 2190 };
duke@435 2191
duke@435 2192
duke@435 2193 Value PhiSimplifier::simplify(Value v) {
duke@435 2194 Phi* phi = v->as_Phi();
duke@435 2195
duke@435 2196 if (phi == NULL) {
duke@435 2197 // no phi function
duke@435 2198 return v;
duke@435 2199 } else if (v->has_subst()) {
duke@435 2200 // already substituted; subst can be phi itself -> simplify
duke@435 2201 return simplify(v->subst());
duke@435 2202 } else if (phi->is_set(Phi::cannot_simplify)) {
duke@435 2203 // already tried to simplify phi before
duke@435 2204 return phi;
duke@435 2205 } else if (phi->is_set(Phi::visited)) {
duke@435 2206 // break cycles in phi functions
duke@435 2207 return phi;
duke@435 2208 } else if (phi->type()->is_illegal()) {
duke@435 2209 // illegal phi functions are ignored anyway
duke@435 2210 return phi;
duke@435 2211
duke@435 2212 } else {
duke@435 2213 // mark phi function as processed to break cycles in phi functions
duke@435 2214 phi->set(Phi::visited);
duke@435 2215
duke@435 2216 // simplify x = [y, x] and x = [y, y] to y
duke@435 2217 Value subst = NULL;
duke@435 2218 int opd_count = phi->operand_count();
duke@435 2219 for (int i = 0; i < opd_count; i++) {
duke@435 2220 Value opd = phi->operand_at(i);
duke@435 2221 assert(opd != NULL, "Operand must exist!");
duke@435 2222
duke@435 2223 if (opd->type()->is_illegal()) {
duke@435 2224 // if one operand is illegal, the entire phi function is illegal
duke@435 2225 phi->make_illegal();
duke@435 2226 phi->clear(Phi::visited);
duke@435 2227 return phi;
duke@435 2228 }
duke@435 2229
duke@435 2230 Value new_opd = simplify(opd);
duke@435 2231 assert(new_opd != NULL, "Simplified operand must exist!");
duke@435 2232
duke@435 2233 if (new_opd != phi && new_opd != subst) {
duke@435 2234 if (subst == NULL) {
duke@435 2235 subst = new_opd;
duke@435 2236 } else {
duke@435 2237 // no simplification possible
duke@435 2238 phi->set(Phi::cannot_simplify);
duke@435 2239 phi->clear(Phi::visited);
duke@435 2240 return phi;
duke@435 2241 }
duke@435 2242 }
duke@435 2243 }
duke@435 2244
duke@435 2245       // successfully simplified phi function
duke@435 2246 assert(subst != NULL, "illegal phi function");
duke@435 2247 _has_substitutions = true;
duke@435 2248 phi->clear(Phi::visited);
duke@435 2249 phi->set_subst(subst);
duke@435 2250
duke@435 2251 #ifndef PRODUCT
duke@435 2252 if (PrintPhiFunctions) {
duke@435 2253 tty->print_cr("simplified phi function %c%d to %c%d (Block B%d)", phi->type()->tchar(), phi->id(), subst->type()->tchar(), subst->id(), phi->block()->block_id());
duke@435 2254 }
duke@435 2255 #endif
duke@435 2256
duke@435 2257 return subst;
duke@435 2258 }
duke@435 2259 }
duke@435 2260
duke@435 2261
duke@435 2262 void PhiSimplifier::block_do(BlockBegin* b) {
duke@435 2263 for_each_phi_fun(b, phi,
duke@435 2264 simplify(phi);
duke@435 2265 );
duke@435 2266
duke@435 2267 #ifdef ASSERT
duke@435 2268 for_each_phi_fun(b, phi,
duke@435 2269 assert(phi->operand_count() != 1 || phi->subst() != phi, "missed trivial simplification");
duke@435 2270 );
duke@435 2271
duke@435 2272 ValueStack* state = b->state()->caller_state();
roland@2174 2273 for_each_state_value(state, value,
roland@2174 2274 Phi* phi = value->as_Phi();
roland@2174 2275 assert(phi == NULL || phi->block() != b, "must not have phi function to simplify in caller state");
roland@2174 2276 );
duke@435 2277 #endif
duke@435 2278 }
duke@435 2279
duke@435 2280 // This method is called after all blocks are filled with HIR instructions.
duke@435 2281 // It eliminates all Phi functions of the form x = [y, y] and x = [y, x]
duke@435 2282 void GraphBuilder::eliminate_redundant_phis(BlockBegin* start) {
duke@435 2283 PhiSimplifier simplifier(start);
duke@435 2284 }
duke@435 2285
duke@435 2286
duke@435 2287 void GraphBuilder::connect_to_end(BlockBegin* beg) {
duke@435 2288 // setup iteration
duke@435 2289 kill_all();
duke@435 2290 _block = beg;
roland@2174 2291 _state = beg->state()->copy_for_parsing();
duke@435 2292 _last = beg;
duke@435 2293 iterate_bytecodes_for_block(beg->bci());
duke@435 2294 }
duke@435 2295
duke@435 2296
duke@435 2297 BlockEnd* GraphBuilder::iterate_bytecodes_for_block(int bci) {
duke@435 2298 #ifndef PRODUCT
duke@435 2299 if (PrintIRDuringConstruction) {
duke@435 2300 tty->cr();
duke@435 2301 InstructionPrinter ip;
duke@435 2302 ip.print_instr(_block); tty->cr();
duke@435 2303 ip.print_stack(_block->state()); tty->cr();
duke@435 2304 ip.print_inline_level(_block);
duke@435 2305 ip.print_head();
duke@435 2306 tty->print_cr("locals size: %d stack size: %d", state()->locals_size(), state()->stack_size());
duke@435 2307 }
duke@435 2308 #endif
duke@435 2309 _skip_block = false;
duke@435 2310 assert(state() != NULL, "ValueStack missing!");
duke@435 2311 ciBytecodeStream s(method());
duke@435 2312 s.reset_to_bci(bci);
duke@435 2313 int prev_bci = bci;
duke@435 2314 scope_data()->set_stream(&s);
duke@435 2315 // iterate
duke@435 2316 Bytecodes::Code code = Bytecodes::_illegal;
duke@435 2317 bool push_exception = false;
duke@435 2318
duke@435 2319 if (block()->is_set(BlockBegin::exception_entry_flag) && block()->next() == NULL) {
duke@435 2320 // first thing in the exception entry block should be the exception object.
duke@435 2321 push_exception = true;
duke@435 2322 }
duke@435 2323
duke@435 2324 while (!bailed_out() && last()->as_BlockEnd() == NULL &&
duke@435 2325 (code = stream()->next()) != ciBytecodeStream::EOBC() &&
duke@435 2326 (block_at(s.cur_bci()) == NULL || block_at(s.cur_bci()) == block())) {
roland@2174 2327 assert(state()->kind() == ValueStack::Parsing, "invalid state kind");
duke@435 2328
duke@435 2329 // Check for active jsr during OSR compilation
duke@435 2330 if (compilation()->is_osr_compile()
duke@435 2331 && scope()->is_top_scope()
duke@435 2332 && parsing_jsr()
duke@435 2333 && s.cur_bci() == compilation()->osr_bci()) {
duke@435 2334 bailout("OSR not supported while a jsr is active");
duke@435 2335 }
duke@435 2336
duke@435 2337 if (push_exception) {
duke@435 2338 apush(append(new ExceptionObject()));
duke@435 2339 push_exception = false;
duke@435 2340 }
duke@435 2341
duke@435 2342 // handle bytecode
duke@435 2343 switch (code) {
duke@435 2344 case Bytecodes::_nop : /* nothing to do */ break;
duke@435 2345 case Bytecodes::_aconst_null : apush(append(new Constant(objectNull ))); break;
duke@435 2346 case Bytecodes::_iconst_m1 : ipush(append(new Constant(new IntConstant (-1)))); break;
duke@435 2347 case Bytecodes::_iconst_0 : ipush(append(new Constant(intZero ))); break;
duke@435 2348 case Bytecodes::_iconst_1 : ipush(append(new Constant(intOne ))); break;
duke@435 2349 case Bytecodes::_iconst_2 : ipush(append(new Constant(new IntConstant ( 2)))); break;
duke@435 2350 case Bytecodes::_iconst_3 : ipush(append(new Constant(new IntConstant ( 3)))); break;
duke@435 2351 case Bytecodes::_iconst_4 : ipush(append(new Constant(new IntConstant ( 4)))); break;
duke@435 2352 case Bytecodes::_iconst_5 : ipush(append(new Constant(new IntConstant ( 5)))); break;
duke@435 2353 case Bytecodes::_lconst_0 : lpush(append(new Constant(new LongConstant ( 0)))); break;
duke@435 2354 case Bytecodes::_lconst_1 : lpush(append(new Constant(new LongConstant ( 1)))); break;
duke@435 2355 case Bytecodes::_fconst_0 : fpush(append(new Constant(new FloatConstant ( 0)))); break;
duke@435 2356 case Bytecodes::_fconst_1 : fpush(append(new Constant(new FloatConstant ( 1)))); break;
duke@435 2357 case Bytecodes::_fconst_2 : fpush(append(new Constant(new FloatConstant ( 2)))); break;
duke@435 2358 case Bytecodes::_dconst_0 : dpush(append(new Constant(new DoubleConstant( 0)))); break;
duke@435 2359 case Bytecodes::_dconst_1 : dpush(append(new Constant(new DoubleConstant( 1)))); break;
duke@435 2360 case Bytecodes::_bipush : ipush(append(new Constant(new IntConstant(((signed char*)s.cur_bcp())[1])))); break;
duke@435 2361 case Bytecodes::_sipush : ipush(append(new Constant(new IntConstant((short)Bytes::get_Java_u2(s.cur_bcp()+1))))); break;
duke@435 2362 case Bytecodes::_ldc : // fall through
duke@435 2363 case Bytecodes::_ldc_w : // fall through
duke@435 2364 case Bytecodes::_ldc2_w : load_constant(); break;
duke@435 2365 case Bytecodes::_iload : load_local(intType , s.get_index()); break;
duke@435 2366 case Bytecodes::_lload : load_local(longType , s.get_index()); break;
duke@435 2367 case Bytecodes::_fload : load_local(floatType , s.get_index()); break;
duke@435 2368 case Bytecodes::_dload : load_local(doubleType , s.get_index()); break;
duke@435 2369 case Bytecodes::_aload : load_local(instanceType, s.get_index()); break;
duke@435 2370 case Bytecodes::_iload_0 : load_local(intType , 0); break;
duke@435 2371 case Bytecodes::_iload_1 : load_local(intType , 1); break;
duke@435 2372 case Bytecodes::_iload_2 : load_local(intType , 2); break;
duke@435 2373 case Bytecodes::_iload_3 : load_local(intType , 3); break;
duke@435 2374 case Bytecodes::_lload_0 : load_local(longType , 0); break;
duke@435 2375 case Bytecodes::_lload_1 : load_local(longType , 1); break;
duke@435 2376 case Bytecodes::_lload_2 : load_local(longType , 2); break;
duke@435 2377 case Bytecodes::_lload_3 : load_local(longType , 3); break;
duke@435 2378 case Bytecodes::_fload_0 : load_local(floatType , 0); break;
duke@435 2379 case Bytecodes::_fload_1 : load_local(floatType , 1); break;
duke@435 2380 case Bytecodes::_fload_2 : load_local(floatType , 2); break;
duke@435 2381 case Bytecodes::_fload_3 : load_local(floatType , 3); break;
duke@435 2382 case Bytecodes::_dload_0 : load_local(doubleType, 0); break;
duke@435 2383 case Bytecodes::_dload_1 : load_local(doubleType, 1); break;
duke@435 2384 case Bytecodes::_dload_2 : load_local(doubleType, 2); break;
duke@435 2385 case Bytecodes::_dload_3 : load_local(doubleType, 3); break;
duke@435 2386 case Bytecodes::_aload_0 : load_local(objectType, 0); break;
duke@435 2387 case Bytecodes::_aload_1 : load_local(objectType, 1); break;
duke@435 2388 case Bytecodes::_aload_2 : load_local(objectType, 2); break;
duke@435 2389 case Bytecodes::_aload_3 : load_local(objectType, 3); break;
duke@435 2390 case Bytecodes::_iaload : load_indexed(T_INT ); break;
duke@435 2391 case Bytecodes::_laload : load_indexed(T_LONG ); break;
duke@435 2392 case Bytecodes::_faload : load_indexed(T_FLOAT ); break;
duke@435 2393 case Bytecodes::_daload : load_indexed(T_DOUBLE); break;
duke@435 2394 case Bytecodes::_aaload : load_indexed(T_OBJECT); break;
duke@435 2395 case Bytecodes::_baload : load_indexed(T_BYTE ); break;
duke@435 2396 case Bytecodes::_caload : load_indexed(T_CHAR ); break;
duke@435 2397 case Bytecodes::_saload : load_indexed(T_SHORT ); break;
duke@435 2398 case Bytecodes::_istore : store_local(intType , s.get_index()); break;
duke@435 2399 case Bytecodes::_lstore : store_local(longType , s.get_index()); break;
duke@435 2400 case Bytecodes::_fstore : store_local(floatType , s.get_index()); break;
duke@435 2401 case Bytecodes::_dstore : store_local(doubleType, s.get_index()); break;
duke@435 2402 case Bytecodes::_astore : store_local(objectType, s.get_index()); break;
duke@435 2403 case Bytecodes::_istore_0 : store_local(intType , 0); break;
duke@435 2404 case Bytecodes::_istore_1 : store_local(intType , 1); break;
duke@435 2405 case Bytecodes::_istore_2 : store_local(intType , 2); break;
duke@435 2406 case Bytecodes::_istore_3 : store_local(intType , 3); break;
duke@435 2407 case Bytecodes::_lstore_0 : store_local(longType , 0); break;
duke@435 2408 case Bytecodes::_lstore_1 : store_local(longType , 1); break;
duke@435 2409 case Bytecodes::_lstore_2 : store_local(longType , 2); break;
duke@435 2410 case Bytecodes::_lstore_3 : store_local(longType , 3); break;
duke@435 2411 case Bytecodes::_fstore_0 : store_local(floatType , 0); break;
duke@435 2412 case Bytecodes::_fstore_1 : store_local(floatType , 1); break;
duke@435 2413 case Bytecodes::_fstore_2 : store_local(floatType , 2); break;
duke@435 2414 case Bytecodes::_fstore_3 : store_local(floatType , 3); break;
duke@435 2415 case Bytecodes::_dstore_0 : store_local(doubleType, 0); break;
duke@435 2416 case Bytecodes::_dstore_1 : store_local(doubleType, 1); break;
duke@435 2417 case Bytecodes::_dstore_2 : store_local(doubleType, 2); break;
duke@435 2418 case Bytecodes::_dstore_3 : store_local(doubleType, 3); break;
duke@435 2419 case Bytecodes::_astore_0 : store_local(objectType, 0); break;
duke@435 2420 case Bytecodes::_astore_1 : store_local(objectType, 1); break;
duke@435 2421 case Bytecodes::_astore_2 : store_local(objectType, 2); break;
duke@435 2422 case Bytecodes::_astore_3 : store_local(objectType, 3); break;
duke@435 2423 case Bytecodes::_iastore : store_indexed(T_INT ); break;
duke@435 2424 case Bytecodes::_lastore : store_indexed(T_LONG ); break;
duke@435 2425 case Bytecodes::_fastore : store_indexed(T_FLOAT ); break;
duke@435 2426 case Bytecodes::_dastore : store_indexed(T_DOUBLE); break;
duke@435 2427 case Bytecodes::_aastore : store_indexed(T_OBJECT); break;
duke@435 2428 case Bytecodes::_bastore : store_indexed(T_BYTE ); break;
duke@435 2429 case Bytecodes::_castore : store_indexed(T_CHAR ); break;
duke@435 2430 case Bytecodes::_sastore : store_indexed(T_SHORT ); break;
duke@435 2431 case Bytecodes::_pop : // fall through
duke@435 2432 case Bytecodes::_pop2 : // fall through
duke@435 2433 case Bytecodes::_dup : // fall through
duke@435 2434 case Bytecodes::_dup_x1 : // fall through
duke@435 2435 case Bytecodes::_dup_x2 : // fall through
duke@435 2436 case Bytecodes::_dup2 : // fall through
duke@435 2437 case Bytecodes::_dup2_x1 : // fall through
duke@435 2438 case Bytecodes::_dup2_x2 : // fall through
duke@435 2439 case Bytecodes::_swap : stack_op(code); break;
duke@435 2440 case Bytecodes::_iadd : arithmetic_op(intType , code); break;
duke@435 2441 case Bytecodes::_ladd : arithmetic_op(longType , code); break;
duke@435 2442 case Bytecodes::_fadd : arithmetic_op(floatType , code); break;
duke@435 2443 case Bytecodes::_dadd : arithmetic_op(doubleType, code); break;
duke@435 2444 case Bytecodes::_isub : arithmetic_op(intType , code); break;
duke@435 2445 case Bytecodes::_lsub : arithmetic_op(longType , code); break;
duke@435 2446 case Bytecodes::_fsub : arithmetic_op(floatType , code); break;
duke@435 2447 case Bytecodes::_dsub : arithmetic_op(doubleType, code); break;
duke@435 2448 case Bytecodes::_imul : arithmetic_op(intType , code); break;
duke@435 2449 case Bytecodes::_lmul : arithmetic_op(longType , code); break;
duke@435 2450 case Bytecodes::_fmul : arithmetic_op(floatType , code); break;
duke@435 2451 case Bytecodes::_dmul : arithmetic_op(doubleType, code); break;
roland@2174 2452 case Bytecodes::_idiv : arithmetic_op(intType , code, copy_state_for_exception()); break;
roland@2174 2453 case Bytecodes::_ldiv : arithmetic_op(longType , code, copy_state_for_exception()); break;
duke@435 2454 case Bytecodes::_fdiv : arithmetic_op(floatType , code); break;
duke@435 2455 case Bytecodes::_ddiv : arithmetic_op(doubleType, code); break;
roland@2174 2456 case Bytecodes::_irem : arithmetic_op(intType , code, copy_state_for_exception()); break;
roland@2174 2457 case Bytecodes::_lrem : arithmetic_op(longType , code, copy_state_for_exception()); break;
duke@435 2458 case Bytecodes::_frem : arithmetic_op(floatType , code); break;
duke@435 2459 case Bytecodes::_drem : arithmetic_op(doubleType, code); break;
duke@435 2460 case Bytecodes::_ineg : negate_op(intType ); break;
duke@435 2461 case Bytecodes::_lneg : negate_op(longType ); break;
duke@435 2462 case Bytecodes::_fneg : negate_op(floatType ); break;
duke@435 2463 case Bytecodes::_dneg : negate_op(doubleType); break;
duke@435 2464 case Bytecodes::_ishl : shift_op(intType , code); break;
duke@435 2465 case Bytecodes::_lshl : shift_op(longType, code); break;
duke@435 2466 case Bytecodes::_ishr : shift_op(intType , code); break;
duke@435 2467 case Bytecodes::_lshr : shift_op(longType, code); break;
duke@435 2468 case Bytecodes::_iushr : shift_op(intType , code); break;
duke@435 2469 case Bytecodes::_lushr : shift_op(longType, code); break;
duke@435 2470 case Bytecodes::_iand : logic_op(intType , code); break;
duke@435 2471 case Bytecodes::_land : logic_op(longType, code); break;
duke@435 2472 case Bytecodes::_ior : logic_op(intType , code); break;
duke@435 2473 case Bytecodes::_lor : logic_op(longType, code); break;
duke@435 2474 case Bytecodes::_ixor : logic_op(intType , code); break;
duke@435 2475 case Bytecodes::_lxor : logic_op(longType, code); break;
duke@435 2476 case Bytecodes::_iinc : increment(); break;
duke@435 2477 case Bytecodes::_i2l : convert(code, T_INT , T_LONG ); break;
duke@435 2478 case Bytecodes::_i2f : convert(code, T_INT , T_FLOAT ); break;
duke@435 2479 case Bytecodes::_i2d : convert(code, T_INT , T_DOUBLE); break;
duke@435 2480 case Bytecodes::_l2i : convert(code, T_LONG , T_INT ); break;
duke@435 2481 case Bytecodes::_l2f : convert(code, T_LONG , T_FLOAT ); break;
duke@435 2482 case Bytecodes::_l2d : convert(code, T_LONG , T_DOUBLE); break;
duke@435 2483 case Bytecodes::_f2i : convert(code, T_FLOAT , T_INT ); break;
duke@435 2484 case Bytecodes::_f2l : convert(code, T_FLOAT , T_LONG ); break;
duke@435 2485 case Bytecodes::_f2d : convert(code, T_FLOAT , T_DOUBLE); break;
duke@435 2486 case Bytecodes::_d2i : convert(code, T_DOUBLE, T_INT ); break;
duke@435 2487 case Bytecodes::_d2l : convert(code, T_DOUBLE, T_LONG ); break;
duke@435 2488 case Bytecodes::_d2f : convert(code, T_DOUBLE, T_FLOAT ); break;
duke@435 2489 case Bytecodes::_i2b : convert(code, T_INT , T_BYTE ); break;
duke@435 2490 case Bytecodes::_i2c : convert(code, T_INT , T_CHAR ); break;
duke@435 2491 case Bytecodes::_i2s : convert(code, T_INT , T_SHORT ); break;
duke@435 2492 case Bytecodes::_lcmp : compare_op(longType , code); break;
duke@435 2493 case Bytecodes::_fcmpl : compare_op(floatType , code); break;
duke@435 2494 case Bytecodes::_fcmpg : compare_op(floatType , code); break;
duke@435 2495 case Bytecodes::_dcmpl : compare_op(doubleType, code); break;
duke@435 2496 case Bytecodes::_dcmpg : compare_op(doubleType, code); break;
duke@435 2497 case Bytecodes::_ifeq : if_zero(intType , If::eql); break;
duke@435 2498 case Bytecodes::_ifne : if_zero(intType , If::neq); break;
duke@435 2499 case Bytecodes::_iflt : if_zero(intType , If::lss); break;
duke@435 2500 case Bytecodes::_ifge : if_zero(intType , If::geq); break;
duke@435 2501 case Bytecodes::_ifgt : if_zero(intType , If::gtr); break;
duke@435 2502 case Bytecodes::_ifle : if_zero(intType , If::leq); break;
duke@435 2503 case Bytecodes::_if_icmpeq : if_same(intType , If::eql); break;
duke@435 2504 case Bytecodes::_if_icmpne : if_same(intType , If::neq); break;
duke@435 2505 case Bytecodes::_if_icmplt : if_same(intType , If::lss); break;
duke@435 2506 case Bytecodes::_if_icmpge : if_same(intType , If::geq); break;
duke@435 2507 case Bytecodes::_if_icmpgt : if_same(intType , If::gtr); break;
duke@435 2508 case Bytecodes::_if_icmple : if_same(intType , If::leq); break;
duke@435 2509 case Bytecodes::_if_acmpeq : if_same(objectType, If::eql); break;
duke@435 2510 case Bytecodes::_if_acmpne : if_same(objectType, If::neq); break;
duke@435 2511 case Bytecodes::_goto : _goto(s.cur_bci(), s.get_dest()); break;
duke@435 2512 case Bytecodes::_jsr : jsr(s.get_dest()); break;
duke@435 2513 case Bytecodes::_ret : ret(s.get_index()); break;
duke@435 2514 case Bytecodes::_tableswitch : table_switch(); break;
duke@435 2515 case Bytecodes::_lookupswitch : lookup_switch(); break;
duke@435 2516 case Bytecodes::_ireturn : method_return(ipop()); break;
duke@435 2517 case Bytecodes::_lreturn : method_return(lpop()); break;
duke@435 2518 case Bytecodes::_freturn : method_return(fpop()); break;
duke@435 2519 case Bytecodes::_dreturn : method_return(dpop()); break;
duke@435 2520 case Bytecodes::_areturn : method_return(apop()); break;
duke@435 2521 case Bytecodes::_return : method_return(NULL ); break;
duke@435 2522 case Bytecodes::_getstatic : // fall through
duke@435 2523 case Bytecodes::_putstatic : // fall through
duke@435 2524 case Bytecodes::_getfield : // fall through
duke@435 2525 case Bytecodes::_putfield : access_field(code); break;
duke@435 2526 case Bytecodes::_invokevirtual : // fall through
duke@435 2527 case Bytecodes::_invokespecial : // fall through
duke@435 2528 case Bytecodes::_invokestatic : // fall through
jrose@1161 2529 case Bytecodes::_invokedynamic : // fall through
duke@435 2530 case Bytecodes::_invokeinterface: invoke(code); break;
jrose@1920 2531 case Bytecodes::_new : new_instance(s.get_index_u2()); break;
duke@435 2532 case Bytecodes::_newarray : new_type_array(); break;
duke@435 2533 case Bytecodes::_anewarray : new_object_array(); break;
roland@2174 2534 case Bytecodes::_arraylength : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
duke@435 2535 case Bytecodes::_athrow : throw_op(s.cur_bci()); break;
jrose@1920 2536 case Bytecodes::_checkcast : check_cast(s.get_index_u2()); break;
jrose@1920 2537 case Bytecodes::_instanceof : instance_of(s.get_index_u2()); break;
duke@435 2538 case Bytecodes::_monitorenter : monitorenter(apop(), s.cur_bci()); break;
duke@435 2539 case Bytecodes::_monitorexit : monitorexit (apop(), s.cur_bci()); break;
duke@435 2540 case Bytecodes::_wide : ShouldNotReachHere(); break;
duke@435 2541 case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
duke@435 2542 case Bytecodes::_ifnull : if_null(objectType, If::eql); break;
duke@435 2543 case Bytecodes::_ifnonnull : if_null(objectType, If::neq); break;
duke@435 2544 case Bytecodes::_goto_w : _goto(s.cur_bci(), s.get_far_dest()); break;
duke@435 2545 case Bytecodes::_jsr_w : jsr(s.get_far_dest()); break;
duke@435 2546 case Bytecodes::_breakpoint : BAILOUT_("concurrent setting of breakpoint", NULL);
duke@435 2547 default : ShouldNotReachHere(); break;
duke@435 2548 }
duke@435 2549 // save current bci to set up the Goto at the end
duke@435 2550 prev_bci = s.cur_bci();
duke@435 2551 }
duke@435 2552 CHECK_BAILOUT_(NULL);
duke@435 2553 // stop processing of this block (see try_inline_full)
duke@435 2554 if (_skip_block) {
duke@435 2555 _skip_block = false;
duke@435 2556 assert(_last && _last->as_BlockEnd(), "");
duke@435 2557 return _last->as_BlockEnd();
duke@435 2558 }
duke@435 2559 // if there are any instructions, check whether the last one is a BlockEnd instruction
duke@435 2560 BlockEnd* end = last()->as_BlockEnd();
duke@435 2561 if (end == NULL) {
duke@435 2562 // all blocks must end with a BlockEnd instruction => add a Goto
duke@435 2563 end = new Goto(block_at(s.cur_bci()), false);
roland@2174 2564 append(end);
duke@435 2565 }
duke@435 2566 assert(end == last()->as_BlockEnd(), "inconsistency");
duke@435 2567
roland@2174 2568 assert(end->state() != NULL, "state must already be present");
roland@2174 2569 assert((end->as_Return() == NULL && end->as_Throw() == NULL) || end->state()->stack_size() == 0, "stack not needed for return and throw");
duke@435 2570
duke@435 2571 // connect to begin & set state
duke@435 2572 // NOTE that inlining may have changed the block we are parsing
duke@435 2573 block()->set_end(end);
duke@435 2574 // propagate state
duke@435 2575 for (int i = end->number_of_sux() - 1; i >= 0; i--) {
duke@435 2576 BlockBegin* sux = end->sux_at(i);
duke@435 2577 assert(sux->is_predecessor(block()), "predecessor missing");
duke@435 2578 // be careful, bail out if bytecodes are strange
roland@2174 2579 if (!sux->try_merge(end->state())) BAILOUT_("block join failed", NULL);
duke@435 2580 scope_data()->add_to_work_list(end->sux_at(i));
duke@435 2581 }
duke@435 2582
duke@435 2583 scope_data()->set_stream(NULL);
duke@435 2584
duke@435 2585 // done
duke@435 2586 return end;
duke@435 2587 }
duke@435 2588
duke@435 2589
duke@435 2590 void GraphBuilder::iterate_all_blocks(bool start_in_current_block_for_inlining) {
duke@435 2591 do {
duke@435 2592 if (start_in_current_block_for_inlining && !bailed_out()) {
duke@435 2593 iterate_bytecodes_for_block(0);
duke@435 2594 start_in_current_block_for_inlining = false;
duke@435 2595 } else {
duke@435 2596 BlockBegin* b;
duke@435 2597 while ((b = scope_data()->remove_from_work_list()) != NULL) {
duke@435 2598 if (!b->is_set(BlockBegin::was_visited_flag)) {
duke@435 2599 if (b->is_set(BlockBegin::osr_entry_flag)) {
duke@435 2600 // we're about to parse the osr entry block, so make sure
duke@435 2601 // we set up the OSR edge leading into this block so that
duke@435 2602 // Phis get set up correctly.
duke@435 2603 setup_osr_entry_block();
duke@435 2604 // this is no longer the osr entry block, so clear it.
duke@435 2605 b->clear(BlockBegin::osr_entry_flag);
duke@435 2606 }
duke@435 2607 b->set(BlockBegin::was_visited_flag);
duke@435 2608 connect_to_end(b);
duke@435 2609 }
duke@435 2610 }
duke@435 2611 }
duke@435 2612 } while (!bailed_out() && !scope_data()->is_work_list_empty());
duke@435 2613 }
duke@435 2614
duke@435 2615
duke@435 2616 bool GraphBuilder::_can_trap [Bytecodes::number_of_java_codes];
duke@435 2617
duke@435 2618 void GraphBuilder::initialize() {
duke@435 2619 // the following bytecodes are assumed to potentially
duke@435 2620 // throw exceptions in compiled code - note that e.g.
duke@435 2621 // monitorexit & the return bytecodes do not throw
duke@435 2622 // exceptions since monitor pairing proved that they
duke@435 2623 // succeed (if monitor pairing succeeded)
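// For example, _idiv appears in the list below because integer division
// can throw an ArithmeticException at runtime, while _ireturn does not
// appear because monitor pairing has already proved that returns succeed.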
duke@435 2624 Bytecodes::Code can_trap_list[] =
duke@435 2625 { Bytecodes::_ldc
duke@435 2626 , Bytecodes::_ldc_w
duke@435 2627 , Bytecodes::_ldc2_w
duke@435 2628 , Bytecodes::_iaload
duke@435 2629 , Bytecodes::_laload
duke@435 2630 , Bytecodes::_faload
duke@435 2631 , Bytecodes::_daload
duke@435 2632 , Bytecodes::_aaload
duke@435 2633 , Bytecodes::_baload
duke@435 2634 , Bytecodes::_caload
duke@435 2635 , Bytecodes::_saload
duke@435 2636 , Bytecodes::_iastore
duke@435 2637 , Bytecodes::_lastore
duke@435 2638 , Bytecodes::_fastore
duke@435 2639 , Bytecodes::_dastore
duke@435 2640 , Bytecodes::_aastore
duke@435 2641 , Bytecodes::_bastore
duke@435 2642 , Bytecodes::_castore
duke@435 2643 , Bytecodes::_sastore
duke@435 2644 , Bytecodes::_idiv
duke@435 2645 , Bytecodes::_ldiv
duke@435 2646 , Bytecodes::_irem
duke@435 2647 , Bytecodes::_lrem
duke@435 2648 , Bytecodes::_getstatic
duke@435 2649 , Bytecodes::_putstatic
duke@435 2650 , Bytecodes::_getfield
duke@435 2651 , Bytecodes::_putfield
duke@435 2652 , Bytecodes::_invokevirtual
duke@435 2653 , Bytecodes::_invokespecial
duke@435 2654 , Bytecodes::_invokestatic
jrose@1161 2655 , Bytecodes::_invokedynamic
duke@435 2656 , Bytecodes::_invokeinterface
duke@435 2657 , Bytecodes::_new
duke@435 2658 , Bytecodes::_newarray
duke@435 2659 , Bytecodes::_anewarray
duke@435 2660 , Bytecodes::_arraylength
duke@435 2661 , Bytecodes::_athrow
duke@435 2662 , Bytecodes::_checkcast
duke@435 2663 , Bytecodes::_instanceof
duke@435 2664 , Bytecodes::_monitorenter
duke@435 2665 , Bytecodes::_multianewarray
duke@435 2666 };
duke@435 2667
duke@435 2668 // initialize trap tables
duke@435 2669 for (int i = 0; i < Bytecodes::number_of_java_codes; i++) {
duke@435 2670 _can_trap[i] = false;
duke@435 2671 }
duke@435 2672 // set standard trap info
duke@435 2673 for (uint j = 0; j < ARRAY_SIZE(can_trap_list); j++) {
duke@435 2674 _can_trap[can_trap_list[j]] = true;
duke@435 2675 }
duke@435 2676 }
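// A minimal sketch of how a trap table like this can be consulted while
// parsing a bytecode; the call-site shape below is hypothetical and for
// illustration only:
//
//   Bytecodes::Code code = s.cur_bc();
//   if (GraphBuilder::_can_trap[code]) {
//     // the instruction needs a ValueStack copy so that deoptimization
//     // or exception dispatch can reconstruct the interpreter state
//   }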
duke@435 2677
duke@435 2678
duke@435 2679 BlockBegin* GraphBuilder::header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state) {
duke@435 2680 assert(entry->is_set(f), "entry/flag mismatch");
duke@435 2681 // create header block
duke@435 2682 BlockBegin* h = new BlockBegin(entry->bci());
duke@435 2683 h->set_depth_first_number(0);
duke@435 2684
duke@435 2685 Value l = h;
duke@435 2686 BlockEnd* g = new Goto(entry, false);
duke@435 2687 l->set_next(g, entry->bci());
duke@435 2688 h->set_end(g);
duke@435 2689 h->set(f);
duke@435 2690 // setup header block end state
roland@2174 2691 ValueStack* s = state->copy(ValueStack::StateAfter, entry->bci()); // can use copy since stack is empty (=> no phis)
duke@435 2692 assert(s->stack_is_empty(), "must have empty stack at entry point");
duke@435 2693 g->set_state(s);
duke@435 2694 return h;
duke@435 2695 }
duke@435 2696
duke@435 2697
duke@435 2698
duke@435 2699 BlockBegin* GraphBuilder::setup_start_block(int osr_bci, BlockBegin* std_entry, BlockBegin* osr_entry, ValueStack* state) {
duke@435 2700 BlockBegin* start = new BlockBegin(0);
duke@435 2701
duke@435 2702 // This code eliminates the empty start block at the beginning of
duke@435 2703 // each method. Previously, each method started with the
duke@435 2704 // start-block created below, and this block was followed by the
duke@435 2705 // header block that was always empty. This header block is only
duke@435 2706 // necessary if std_entry is also a backward branch target because
duke@435 2707 // then phi functions may be necessary in the header block. It's
duke@435 2708 // also necessary when profiling so that there's a single block that
duke@435 2709 // can increment the interpreter_invocation_count.
duke@435 2710 BlockBegin* new_header_block;
iveresov@2138 2711 if (std_entry->number_of_preds() > 0 || count_invocations() || count_backedges()) {
iveresov@2138 2712 new_header_block = header_block(std_entry, BlockBegin::std_entry_flag, state);
iveresov@2138 2713 } else {
duke@435 2714 new_header_block = std_entry;
duke@435 2715 }
duke@435 2716
duke@435 2717 // setup start block (root for the IR graph)
duke@435 2718 Base* base =
duke@435 2719 new Base(
duke@435 2720 new_header_block,
duke@435 2721 osr_entry
duke@435 2722 );
duke@435 2723 start->set_next(base, 0);
duke@435 2724 start->set_end(base);
duke@435 2725 // create & setup state for start block
roland@2174 2726 start->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));
roland@2174 2727 base->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));
duke@435 2728
duke@435 2729 if (base->std_entry()->state() == NULL) {
duke@435 2730 // setup states for header blocks
duke@435 2731 base->std_entry()->merge(state);
duke@435 2732 }
duke@435 2733
duke@435 2734 assert(base->std_entry()->state() != NULL, "");
duke@435 2735 return start;
duke@435 2736 }
duke@435 2737
duke@435 2738
duke@435 2739 void GraphBuilder::setup_osr_entry_block() {
duke@435 2740 assert(compilation()->is_osr_compile(), "only for osrs");
duke@435 2741
duke@435 2742 int osr_bci = compilation()->osr_bci();
duke@435 2743 ciBytecodeStream s(method());
duke@435 2744 s.reset_to_bci(osr_bci);
duke@435 2745 s.next();
duke@435 2746 scope_data()->set_stream(&s);
duke@435 2747
duke@435 2748 // create a new block to be the osr setup code
duke@435 2749 _osr_entry = new BlockBegin(osr_bci);
duke@435 2750 _osr_entry->set(BlockBegin::osr_entry_flag);
duke@435 2751 _osr_entry->set_depth_first_number(0);
duke@435 2752 BlockBegin* target = bci2block()->at(osr_bci);
duke@435 2753 assert(target != NULL && target->is_set(BlockBegin::osr_entry_flag), "must be there");
duke@435 2754 // the osr entry has no values for locals
duke@435 2755 ValueStack* state = target->state()->copy();
duke@435 2756 _osr_entry->set_state(state);
duke@435 2757
duke@435 2758 kill_all();
duke@435 2759 _block = _osr_entry;
duke@435 2760 _state = _osr_entry->state()->copy();
roland@2174 2761 assert(_state->bci() == osr_bci, "mismatch");
duke@435 2762 _last = _osr_entry;
duke@435 2763 Value e = append(new OsrEntry());
duke@435 2764 e->set_needs_null_check(false);
duke@435 2765
duke@435 2766 // OSR buffer is
duke@435 2767 //
duke@435 2768 // locals[nlocals-1..0]
duke@435 2769 // monitors[number_of_locks-1..0]
duke@435 2770 //
duke@435 2771 // locals is a direct copy of the interpreter frame, so in the osr buffer
duke@435 2772 // the first slot in the local array is the last local from the interpreter
duke@435 2773 // and the last slot is local[0] (the receiver) from the interpreter
duke@435 2774 //
duke@435 2775 // Similarly with locks: the first lock slot in the osr buffer is the nth lock
duke@435 2776 // from the interpreter frame, and the nth lock slot in the osr buffer is the
duke@435 2777 // 0th lock in the interpreter frame (the method lock if a synchronized method)
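// Worked example of the offset computation done below (assumed numbers,
// for illustration only): with max_locals == 3 and BytesPerWord == 8,
// locals_offset is 8 * (3 - 1) == 16, so interpreter local 0 (the
// receiver) is read from buffer offset 16, local 1 from offset 8, and
// local 2 from offset 0; i.e. the buffer holds the locals in reverse
// order, matching the layout described above.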
duke@435 2778
duke@435 2779 // Initialize monitors in the compiled activation.
duke@435 2780
duke@435 2781 int index;
duke@435 2782 Value local;
duke@435 2783
duke@435 2784 // find all the locals that the interpreter thinks contain live oops
duke@435 2785 const BitMap live_oops = method()->live_local_oops_at_bci(osr_bci);
duke@435 2786
duke@435 2787 // compute the offset into the locals so that we can treat the buffer
duke@435 2788 // as if the locals were still in the interpreter frame
duke@435 2789 int locals_offset = BytesPerWord * (method()->max_locals() - 1);
duke@435 2790 for_each_local_value(state, index, local) {
duke@435 2791 int offset = locals_offset - (index + local->type()->size() - 1) * BytesPerWord;
duke@435 2792 Value get;
duke@435 2793 if (local->type()->is_object_kind() && !live_oops.at(index)) {
duke@435 2794 // The interpreter thinks this local is dead but the compiler
duke@435 2795 // doesn't, so pretend that the interpreter passed in null.
duke@435 2796 get = append(new Constant(objectNull));
duke@435 2797 } else {
duke@435 2798 get = append(new UnsafeGetRaw(as_BasicType(local->type()), e,
duke@435 2799 append(new Constant(new IntConstant(offset))),
duke@435 2800 0,
iveresov@2344 2801 true /*unaligned*/, true /*wide*/));
duke@435 2802 }
duke@435 2803 _state->store_local(index, get);
duke@435 2804 }
duke@435 2805
duke@435 2806 // the storage for the OSR buffer is freed manually in the LIRGenerator.
duke@435 2807
duke@435 2808 assert(state->caller_state() == NULL, "should be top scope");
duke@435 2809 state->clear_locals();
duke@435 2810 Goto* g = new Goto(target, false);
duke@435 2811 append(g);
duke@435 2812 _osr_entry->set_end(g);
duke@435 2813 target->merge(_osr_entry->end()->state());
duke@435 2814
duke@435 2815 scope_data()->set_stream(NULL);
duke@435 2816 }
duke@435 2817
duke@435 2818
duke@435 2819 ValueStack* GraphBuilder::state_at_entry() {
roland@2174 2820 ValueStack* state = new ValueStack(scope(), NULL);
duke@435 2821
duke@435 2822 // Set up locals for receiver
duke@435 2823 int idx = 0;
duke@435 2824 if (!method()->is_static()) {
duke@435 2825 // we should always see the receiver
duke@435 2826 state->store_local(idx, new Local(objectType, idx));
duke@435 2827 idx = 1;
duke@435 2828 }
duke@435 2829
duke@435 2830 // Set up locals for incoming arguments
duke@435 2831 ciSignature* sig = method()->signature();
duke@435 2832 for (int i = 0; i < sig->count(); i++) {
duke@435 2833 ciType* type = sig->type_at(i);
duke@435 2834 BasicType basic_type = type->basic_type();
duke@435 2835 // don't allow T_ARRAY to propagate into locals types
duke@435 2836 if (basic_type == T_ARRAY) basic_type = T_OBJECT;
duke@435 2837 ValueType* vt = as_ValueType(basic_type);
duke@435 2838 state->store_local(idx, new Local(vt, idx));
duke@435 2839 idx += type->size();
duke@435 2840 }
duke@435 2841
duke@435 2842 // lock synchronized method
duke@435 2843 if (method()->is_synchronized()) {
roland@2174 2844 state->lock(NULL);
duke@435 2845 }
duke@435 2846
duke@435 2847 return state;
duke@435 2848 }
duke@435 2849
duke@435 2850
duke@435 2851 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
duke@435 2852 : _scope_data(NULL)
duke@435 2853 , _instruction_count(0)
duke@435 2854 , _osr_entry(NULL)
duke@435 2855 , _memory(new MemoryBuffer())
duke@435 2856 , _compilation(compilation)
duke@435 2857 , _inline_bailout_msg(NULL)
duke@435 2858 {
duke@435 2859 int osr_bci = compilation->osr_bci();
duke@435 2860
duke@435 2861 // determine entry points and bci2block mapping
duke@435 2862 BlockListBuilder blm(compilation, scope, osr_bci);
duke@435 2863 CHECK_BAILOUT();
duke@435 2864
duke@435 2865 BlockList* bci2block = blm.bci2block();
duke@435 2866 BlockBegin* start_block = bci2block->at(0);
duke@435 2867
duke@435 2868 push_root_scope(scope, bci2block, start_block);
duke@435 2869
duke@435 2870 // setup state for std entry
duke@435 2871 _initial_state = state_at_entry();
duke@435 2872 start_block->merge(_initial_state);
duke@435 2873
duke@435 2874 // complete graph
duke@435 2875 _vmap = new ValueMap();
duke@435 2876 switch (scope->method()->intrinsic_id()) {
duke@435 2877 case vmIntrinsics::_dabs : // fall through
duke@435 2878 case vmIntrinsics::_dsqrt : // fall through
duke@435 2879 case vmIntrinsics::_dsin : // fall through
duke@435 2880 case vmIntrinsics::_dcos : // fall through
duke@435 2881 case vmIntrinsics::_dtan : // fall through
duke@435 2882 case vmIntrinsics::_dlog : // fall through
duke@435 2883 case vmIntrinsics::_dlog10 : // fall through
duke@435 2884 {
duke@435 2885 // Compiles where the root method is an intrinsic need a special
duke@435 2886 // compilation environment because the bytecodes for the method
duke@435 2887 // shouldn't be parsed during the compilation, only the special
duke@435 2888 // Intrinsic node should be emitted. If this isn't done, the
duke@435 2889 // code for the inlined version will be different from the root
duke@435 2890 // compiled version, which could lead to monotonicity problems on
duke@435 2891 // intel.
duke@435 2892
duke@435 2893 // Set up a stream so that appending instructions works properly.
duke@435 2894 ciBytecodeStream s(scope->method());
duke@435 2895 s.reset_to_bci(0);
duke@435 2896 scope_data()->set_stream(&s);
duke@435 2897 s.next();
duke@435 2898
duke@435 2899 // setup the initial block state
duke@435 2900 _block = start_block;
roland@2174 2901 _state = start_block->state()->copy_for_parsing();
duke@435 2902 _last = start_block;
duke@435 2903 load_local(doubleType, 0);
duke@435 2904
duke@435 2905 // Emit the intrinsic node.
duke@435 2906 bool result = try_inline_intrinsics(scope->method());
duke@435 2907 if (!result) BAILOUT("failed to inline intrinsic");
duke@435 2908 method_return(dpop());
duke@435 2909
duke@435 2910 // connect the begin and end blocks and we're all done.
duke@435 2911 BlockEnd* end = last()->as_BlockEnd();
duke@435 2912 block()->set_end(end);
duke@435 2913 break;
duke@435 2914 }
johnc@2781 2915
johnc@2781 2916 case vmIntrinsics::_Reference_get:
johnc@2781 2917 {
johnc@2781 2918 if (UseG1GC) {
johnc@2781 2919 // With java.lang.ref.Reference.get() we must go through the
johnc@2781 2920 // intrinsic - when G1 is enabled - even when get() is the root
johnc@2781 2921 // method of the compile so that, if necessary, the value in
johnc@2781 2922 // the referent field of the reference object gets recorded by
johnc@2781 2923 // the pre-barrier code.
johnc@2781 2924 // Specifically, if G1 is enabled, the value in the referent
johnc@2781 2925 // field is recorded by the G1 SATB pre-barrier. This will
johnc@2781 2926 // result in the referent being marked live and the reference
johnc@2781 2927 // object removed from the list of discovered references during
johnc@2781 2928 // reference processing.
johnc@2781 2929
johnc@2781 2930 // Set up a stream so that appending instructions works properly.
johnc@2781 2931 ciBytecodeStream s(scope->method());
johnc@2781 2932 s.reset_to_bci(0);
johnc@2781 2933 scope_data()->set_stream(&s);
johnc@2781 2934 s.next();
johnc@2781 2935
johnc@2781 2936 // setup the initial block state
johnc@2781 2937 _block = start_block;
johnc@2781 2938 _state = start_block->state()->copy_for_parsing();
johnc@2781 2939 _last = start_block;
johnc@2781 2940 load_local(objectType, 0);
johnc@2781 2941
johnc@2781 2942 // Emit the intrinsic node.
johnc@2781 2943 bool result = try_inline_intrinsics(scope->method());
johnc@2781 2944 if (!result) BAILOUT("failed to inline intrinsic");
johnc@2781 2945 method_return(apop());
johnc@2781 2946
johnc@2781 2947 // connect the begin and end blocks and we're all done.
johnc@2781 2948 BlockEnd* end = last()->as_BlockEnd();
johnc@2781 2949 block()->set_end(end);
johnc@2781 2950 break;
johnc@2781 2951 }
johnc@2781 2952 // Otherwise, fall through
johnc@2781 2953 }
johnc@2781 2954
duke@435 2955 default:
duke@435 2956 scope_data()->add_to_work_list(start_block);
duke@435 2957 iterate_all_blocks();
duke@435 2958 break;
duke@435 2959 }
duke@435 2960 CHECK_BAILOUT();
duke@435 2961
duke@435 2962 _start = setup_start_block(osr_bci, start_block, _osr_entry, _initial_state);
duke@435 2963
duke@435 2964 eliminate_redundant_phis(_start);
duke@435 2965
duke@435 2966 NOT_PRODUCT(if (PrintValueNumbering && Verbose) print_stats());
duke@435 2967 // for osr compile, bailout if some requirements are not fulfilled
duke@435 2968 if (osr_bci != -1) {
duke@435 2969 BlockBegin* osr_block = blm.bci2block()->at(osr_bci);
duke@435 2970 assert(osr_block->is_set(BlockBegin::was_visited_flag),"osr entry must have been visited for osr compile");
duke@435 2971
duke@435 2972 // check if osr entry point has empty stack - we cannot handle non-empty stacks at osr entry points
duke@435 2973 if (!osr_block->state()->stack_is_empty()) {
duke@435 2974 BAILOUT("stack not empty at OSR entry point");
duke@435 2975 }
duke@435 2976 }
duke@435 2977 #ifndef PRODUCT
duke@435 2978 if (PrintCompilation && Verbose) tty->print_cr("Created %d Instructions", _instruction_count);
duke@435 2979 #endif
duke@435 2980 }
duke@435 2981
duke@435 2982
roland@2174 2983 ValueStack* GraphBuilder::copy_state_before() {
roland@2174 2984 return copy_state_before_with_bci(bci());
duke@435 2985 }
duke@435 2986
roland@2174 2987 ValueStack* GraphBuilder::copy_state_exhandling() {
roland@2174 2988 return copy_state_exhandling_with_bci(bci());
roland@2174 2989 }
roland@2174 2990
roland@2174 2991 ValueStack* GraphBuilder::copy_state_for_exception() {
roland@2174 2992 return copy_state_for_exception_with_bci(bci());
roland@2174 2993 }
roland@2174 2994
roland@2174 2995 ValueStack* GraphBuilder::copy_state_before_with_bci(int bci) {
roland@2174 2996 return state()->copy(ValueStack::StateBefore, bci);
roland@2174 2997 }
roland@2174 2998
roland@2174 2999 ValueStack* GraphBuilder::copy_state_exhandling_with_bci(int bci) {
roland@2174 3000 if (!has_handler()) return NULL;
roland@2174 3001 return state()->copy(ValueStack::StateBefore, bci);
roland@2174 3002 }
roland@2174 3003
roland@2174 3004 ValueStack* GraphBuilder::copy_state_for_exception_with_bci(int bci) {
roland@2174 3005 ValueStack* s = copy_state_exhandling_with_bci(bci);
roland@2174 3006 if (s == NULL) {
roland@2174 3007 if (_compilation->env()->jvmti_can_access_local_variables()) {
roland@2174 3008 s = state()->copy(ValueStack::ExceptionState, bci);
roland@2174 3009 } else {
roland@2174 3010 s = state()->copy(ValueStack::EmptyExceptionState, bci);
roland@2174 3011 }
roland@2174 3012 }
roland@2174 3013 return s;
roland@2174 3014 }
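// Descriptive summary of the three state-copying helpers above:
// - copy_state_before*         always copies the full state (StateBefore)
// - copy_state_exhandling*     copies only if an exception handler exists,
//                              otherwise returns NULL
// - copy_state_for_exception*  additionally falls back to an ExceptionState
//                              (or EmptyExceptionState) copy when there is no
//                              handler, depending on whether JVMTI can access
//                              local variables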
duke@435 3015
duke@435 3016 int GraphBuilder::recursive_inline_level(ciMethod* cur_callee) const {
duke@435 3017 int recur_level = 0;
duke@435 3018 for (IRScope* s = scope(); s != NULL; s = s->caller()) {
duke@435 3019 if (s->method() == cur_callee) {
duke@435 3020 ++recur_level;
duke@435 3021 }
duke@435 3022 }
duke@435 3023 return recur_level;
duke@435 3024 }
duke@435 3025
duke@435 3026
duke@435 3027 bool GraphBuilder::try_inline(ciMethod* callee, bool holder_known) {
duke@435 3028 // Clear out any existing inline bailout condition
duke@435 3029 clear_inline_bailout();
duke@435 3030
duke@435 3031 if (callee->should_exclude()) {
duke@435 3032 // callee is excluded
duke@435 3033 INLINE_BAILOUT("excluded by CompilerOracle")
duke@435 3034 } else if (!callee->can_be_compiled()) {
duke@435 3035 // callee is not compilable (prob. has breakpoints)
duke@435 3036 INLINE_BAILOUT("not compilable")
duke@435 3037 } else if (callee->intrinsic_id() != vmIntrinsics::_none && try_inline_intrinsics(callee)) {
duke@435 3038 // intrinsics can be native or not
duke@435 3039 return true;
duke@435 3040 } else if (callee->is_native()) {
duke@435 3041 // non-intrinsic natives cannot be inlined
duke@435 3042 INLINE_BAILOUT("non-intrinsic native")
duke@435 3043 } else if (callee->is_abstract()) {
duke@435 3044 INLINE_BAILOUT("abstract")
duke@435 3045 } else {
duke@435 3046 return try_inline_full(callee, holder_known);
duke@435 3047 }
duke@435 3048 }
duke@435 3049
duke@435 3050
duke@435 3051 bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
duke@435 3052 if (!InlineNatives ) INLINE_BAILOUT("intrinsic method inlining disabled");
never@1895 3053 if (callee->is_synchronized()) {
never@1895 3054 // We don't currently support any synchronized intrinsics
never@1895 3055 return false;
never@1895 3056 }
never@1895 3057
duke@435 3058 // callee seems like a good candidate
duke@435 3059 // determine id
duke@435 3060 bool preserves_state = false;
duke@435 3061 bool cantrap = true;
duke@435 3062 vmIntrinsics::ID id = callee->intrinsic_id();
duke@435 3063 switch (id) {
duke@435 3064 case vmIntrinsics::_arraycopy :
duke@435 3065 if (!InlineArrayCopy) return false;
duke@435 3066 break;
duke@435 3067
duke@435 3068 case vmIntrinsics::_currentTimeMillis:
duke@435 3069 case vmIntrinsics::_nanoTime:
duke@435 3070 preserves_state = true;
duke@435 3071 cantrap = false;
duke@435 3072 break;
duke@435 3073
duke@435 3074 case vmIntrinsics::_floatToRawIntBits :
duke@435 3075 case vmIntrinsics::_intBitsToFloat :
duke@435 3076 case vmIntrinsics::_doubleToRawLongBits :
duke@435 3077 case vmIntrinsics::_longBitsToDouble :
duke@435 3078 if (!InlineMathNatives) return false;
duke@435 3079 preserves_state = true;
duke@435 3080 cantrap = false;
duke@435 3081 break;
duke@435 3082
duke@435 3083 case vmIntrinsics::_getClass :
duke@435 3084 if (!InlineClassNatives) return false;
duke@435 3085 preserves_state = true;
duke@435 3086 break;
duke@435 3087
duke@435 3088 case vmIntrinsics::_currentThread :
duke@435 3089 if (!InlineThreadNatives) return false;
duke@435 3090 preserves_state = true;
duke@435 3091 cantrap = false;
duke@435 3092 break;
duke@435 3093
duke@435 3094 case vmIntrinsics::_dabs : // fall through
duke@435 3095 case vmIntrinsics::_dsqrt : // fall through
duke@435 3096 case vmIntrinsics::_dsin : // fall through
duke@435 3097 case vmIntrinsics::_dcos : // fall through
duke@435 3098 case vmIntrinsics::_dtan : // fall through
duke@435 3099 case vmIntrinsics::_dlog : // fall through
duke@435 3100 case vmIntrinsics::_dlog10 : // fall through
duke@435 3101 if (!InlineMathNatives) return false;
duke@435 3102 cantrap = false;
duke@435 3103 preserves_state = true;
duke@435 3104 break;
duke@435 3105
duke@435 3106 // sun/misc/AtomicLong.attemptUpdate
duke@435 3107 case vmIntrinsics::_attemptUpdate :
duke@435 3108 if (!VM_Version::supports_cx8()) return false;
duke@435 3109 if (!InlineAtomicLong) return false;
duke@435 3110 preserves_state = true;
duke@435 3111 break;
duke@435 3112
duke@435 3113 // Use special nodes for Unsafe instructions so we can more easily
duke@435 3114 // perform an address-mode optimization on the raw variants
duke@435 3115 case vmIntrinsics::_getObject : return append_unsafe_get_obj(callee, T_OBJECT, false);
duke@435 3116 case vmIntrinsics::_getBoolean: return append_unsafe_get_obj(callee, T_BOOLEAN, false);
duke@435 3117 case vmIntrinsics::_getByte : return append_unsafe_get_obj(callee, T_BYTE, false);
duke@435 3118 case vmIntrinsics::_getShort : return append_unsafe_get_obj(callee, T_SHORT, false);
duke@435 3119 case vmIntrinsics::_getChar : return append_unsafe_get_obj(callee, T_CHAR, false);
duke@435 3120 case vmIntrinsics::_getInt : return append_unsafe_get_obj(callee, T_INT, false);
duke@435 3121 case vmIntrinsics::_getLong : return append_unsafe_get_obj(callee, T_LONG, false);
duke@435 3122 case vmIntrinsics::_getFloat : return append_unsafe_get_obj(callee, T_FLOAT, false);
duke@435 3123 case vmIntrinsics::_getDouble : return append_unsafe_get_obj(callee, T_DOUBLE, false);
duke@435 3124
duke@435 3125 case vmIntrinsics::_putObject : return append_unsafe_put_obj(callee, T_OBJECT, false);
duke@435 3126 case vmIntrinsics::_putBoolean: return append_unsafe_put_obj(callee, T_BOOLEAN, false);
duke@435 3127 case vmIntrinsics::_putByte : return append_unsafe_put_obj(callee, T_BYTE, false);
duke@435 3128 case vmIntrinsics::_putShort : return append_unsafe_put_obj(callee, T_SHORT, false);
duke@435 3129 case vmIntrinsics::_putChar : return append_unsafe_put_obj(callee, T_CHAR, false);
duke@435 3130 case vmIntrinsics::_putInt : return append_unsafe_put_obj(callee, T_INT, false);
duke@435 3131 case vmIntrinsics::_putLong : return append_unsafe_put_obj(callee, T_LONG, false);
duke@435 3132 case vmIntrinsics::_putFloat : return append_unsafe_put_obj(callee, T_FLOAT, false);
duke@435 3133 case vmIntrinsics::_putDouble : return append_unsafe_put_obj(callee, T_DOUBLE, false);
duke@435 3134
duke@435 3135 case vmIntrinsics::_getObjectVolatile : return append_unsafe_get_obj(callee, T_OBJECT, true);
duke@435 3136 case vmIntrinsics::_getBooleanVolatile: return append_unsafe_get_obj(callee, T_BOOLEAN, true);
duke@435 3137 case vmIntrinsics::_getByteVolatile : return append_unsafe_get_obj(callee, T_BYTE, true);
duke@435 3138 case vmIntrinsics::_getShortVolatile : return append_unsafe_get_obj(callee, T_SHORT, true);
duke@435 3139 case vmIntrinsics::_getCharVolatile : return append_unsafe_get_obj(callee, T_CHAR, true);
duke@435 3140 case vmIntrinsics::_getIntVolatile : return append_unsafe_get_obj(callee, T_INT, true);
duke@435 3141 case vmIntrinsics::_getLongVolatile : return append_unsafe_get_obj(callee, T_LONG, true);
duke@435 3142 case vmIntrinsics::_getFloatVolatile : return append_unsafe_get_obj(callee, T_FLOAT, true);
duke@435 3143 case vmIntrinsics::_getDoubleVolatile : return append_unsafe_get_obj(callee, T_DOUBLE, true);
duke@435 3144
duke@435 3145 case vmIntrinsics::_putObjectVolatile : return append_unsafe_put_obj(callee, T_OBJECT, true);
duke@435 3146 case vmIntrinsics::_putBooleanVolatile: return append_unsafe_put_obj(callee, T_BOOLEAN, true);
duke@435 3147 case vmIntrinsics::_putByteVolatile : return append_unsafe_put_obj(callee, T_BYTE, true);
duke@435 3148 case vmIntrinsics::_putShortVolatile : return append_unsafe_put_obj(callee, T_SHORT, true);
duke@435 3149 case vmIntrinsics::_putCharVolatile : return append_unsafe_put_obj(callee, T_CHAR, true);
duke@435 3150 case vmIntrinsics::_putIntVolatile : return append_unsafe_put_obj(callee, T_INT, true);
duke@435 3151 case vmIntrinsics::_putLongVolatile : return append_unsafe_put_obj(callee, T_LONG, true);
duke@435 3152 case vmIntrinsics::_putFloatVolatile : return append_unsafe_put_obj(callee, T_FLOAT, true);
duke@435 3153 case vmIntrinsics::_putDoubleVolatile : return append_unsafe_put_obj(callee, T_DOUBLE, true);
duke@435 3154
duke@435 3155 case vmIntrinsics::_getByte_raw : return append_unsafe_get_raw(callee, T_BYTE);
duke@435 3156 case vmIntrinsics::_getShort_raw : return append_unsafe_get_raw(callee, T_SHORT);
duke@435 3157 case vmIntrinsics::_getChar_raw : return append_unsafe_get_raw(callee, T_CHAR);
duke@435 3158 case vmIntrinsics::_getInt_raw : return append_unsafe_get_raw(callee, T_INT);
duke@435 3159 case vmIntrinsics::_getLong_raw : return append_unsafe_get_raw(callee, T_LONG);
duke@435 3160 case vmIntrinsics::_getFloat_raw : return append_unsafe_get_raw(callee, T_FLOAT);
duke@435 3161 case vmIntrinsics::_getDouble_raw : return append_unsafe_get_raw(callee, T_DOUBLE);
duke@435 3162
duke@435 3163 case vmIntrinsics::_putByte_raw : return append_unsafe_put_raw(callee, T_BYTE);
duke@435 3164 case vmIntrinsics::_putShort_raw : return append_unsafe_put_raw(callee, T_SHORT);
duke@435 3165 case vmIntrinsics::_putChar_raw : return append_unsafe_put_raw(callee, T_CHAR);
duke@435 3166 case vmIntrinsics::_putInt_raw : return append_unsafe_put_raw(callee, T_INT);
duke@435 3167 case vmIntrinsics::_putLong_raw : return append_unsafe_put_raw(callee, T_LONG);
duke@435 3168 case vmIntrinsics::_putFloat_raw : return append_unsafe_put_raw(callee, T_FLOAT);
duke@435 3169 case vmIntrinsics::_putDouble_raw : return append_unsafe_put_raw(callee, T_DOUBLE);
duke@435 3170
duke@435 3171 case vmIntrinsics::_prefetchRead : return append_unsafe_prefetch(callee, false, false);
duke@435 3172 case vmIntrinsics::_prefetchWrite : return append_unsafe_prefetch(callee, false, true);
duke@435 3173 case vmIntrinsics::_prefetchReadStatic : return append_unsafe_prefetch(callee, true, false);
duke@435 3174 case vmIntrinsics::_prefetchWriteStatic : return append_unsafe_prefetch(callee, true, true);
duke@435 3175
duke@435 3176 case vmIntrinsics::_checkIndex :
duke@435 3177 if (!InlineNIOCheckIndex) return false;
duke@435 3178 preserves_state = true;
duke@435 3179 break;
duke@435 3180 case vmIntrinsics::_putOrderedObject : return append_unsafe_put_obj(callee, T_OBJECT, true);
duke@435 3181 case vmIntrinsics::_putOrderedInt : return append_unsafe_put_obj(callee, T_INT, true);
duke@435 3182 case vmIntrinsics::_putOrderedLong : return append_unsafe_put_obj(callee, T_LONG, true);
duke@435 3183
duke@435 3184 case vmIntrinsics::_compareAndSwapLong:
duke@435 3185 if (!VM_Version::supports_cx8()) return false;
duke@435 3186 // fall through
duke@435 3187 case vmIntrinsics::_compareAndSwapInt:
duke@435 3188 case vmIntrinsics::_compareAndSwapObject:
duke@435 3189 append_unsafe_CAS(callee);
duke@435 3190 return true;
duke@435 3191
johnc@2781 3192 case vmIntrinsics::_Reference_get:
johnc@2781 3193 // It is only when G1 is enabled that we absolutely
johnc@2781 3194 // need to use the intrinsic version of Reference.get()
johnc@2781 3195 // so that the value in the referent field, if necessary,
johnc@2781 3196 // can be registered by the pre-barrier code.
johnc@2781 3197 if (!UseG1GC) return false;
johnc@2781 3198 preserves_state = true;
johnc@2781 3199 break;
johnc@2781 3200
duke@435 3201 default : return false; // do not inline
duke@435 3202 }
duke@435 3203 // create intrinsic node
duke@435 3204 const bool has_receiver = !callee->is_static();
duke@435 3205 ValueType* result_type = as_ValueType(callee->return_type());
roland@2174 3206 ValueStack* state_before = copy_state_for_exception();
duke@435 3207
duke@435 3208 Values* args = state()->pop_arguments(callee->arg_size());
iveresov@2138 3209
iveresov@2138 3210 if (is_profiling()) {
duke@435 3211 // Don't profile in the special case where the root method
duke@435 3212 // is the intrinsic
duke@435 3213 if (callee != method()) {
iveresov@2138 3214 // Note that we'd collect profile data in this method if we wanted it.
iveresov@2138 3215 compilation()->set_would_profile(true);
iveresov@2138 3216 if (profile_calls()) {
iveresov@2138 3217 Value recv = NULL;
iveresov@2138 3218 if (has_receiver) {
iveresov@2138 3219 recv = args->at(0);
iveresov@2138 3220 null_check(recv);
iveresov@2138 3221 }
iveresov@2138 3222 profile_call(recv, NULL);
duke@435 3223 }
duke@435 3224 }
duke@435 3225 }
duke@435 3226
roland@2174 3227 Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before,
duke@435 3228 preserves_state, cantrap);
duke@435 3229 // append instruction & push result
duke@435 3230 Value value = append_split(result);
duke@435 3231 if (result_type != voidType) push(result_type, value);
duke@435 3232
duke@435 3233 #ifndef PRODUCT
duke@435 3234 // printing
duke@435 3235 if (PrintInlining) {
duke@435 3236 print_inline_result(callee, true);
duke@435 3237 }
duke@435 3238 #endif
duke@435 3239
duke@435 3240 // done
duke@435 3241 return true;
duke@435 3242 }
duke@435 3243
duke@435 3244
duke@435 3245 bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
duke@435 3246 // Introduce a new callee continuation point - all Ret instructions
duke@435 3247 // will be replaced with Gotos to this point.
duke@435 3248 BlockBegin* cont = block_at(next_bci());
duke@435 3249 assert(cont != NULL, "continuation must exist (BlockListBuilder starts a new block after a jsr)");
duke@435 3250
duke@435 3251 // Note: can not assign state to continuation yet, as we have to
duke@435 3252 // pick up the state from the Ret instructions.
duke@435 3253
duke@435 3254 // Push callee scope
duke@435 3255 push_scope_for_jsr(cont, jsr_dest_bci);
duke@435 3256
duke@435 3257 // Temporarily set up bytecode stream so we can append instructions
duke@435 3258 // (only using the bci of this stream)
duke@435 3259 scope_data()->set_stream(scope_data()->parent()->stream());
duke@435 3260
duke@435 3261 BlockBegin* jsr_start_block = block_at(jsr_dest_bci);
duke@435 3262 assert(jsr_start_block != NULL, "jsr start block must exist");
duke@435 3263 assert(!jsr_start_block->is_set(BlockBegin::was_visited_flag), "should not have visited jsr yet");
duke@435 3264 Goto* goto_sub = new Goto(jsr_start_block, false);
duke@435 3265 // Must copy state to avoid wrong sharing when parsing bytecodes
duke@435 3266 assert(jsr_start_block->state() == NULL, "should have fresh jsr starting block");
roland@2174 3267 jsr_start_block->set_state(copy_state_before_with_bci(jsr_dest_bci));
duke@435 3268 append(goto_sub);
duke@435 3269 _block->set_end(goto_sub);
duke@435 3270 _last = _block = jsr_start_block;
duke@435 3271
duke@435 3272 // Clear out bytecode stream
duke@435 3273 scope_data()->set_stream(NULL);
duke@435 3274
duke@435 3275 scope_data()->add_to_work_list(jsr_start_block);
duke@435 3276
duke@435 3277 // Ready to resume parsing in subroutine
duke@435 3278 iterate_all_blocks();
duke@435 3279
duke@435 3280 // If we bailed out during parsing, return immediately (this is bad news)
duke@435 3281 CHECK_BAILOUT_(false);
duke@435 3282
duke@435 3283 // Detect whether the continuation can actually be reached. If not,
duke@435 3284 // it has not had state set by the join() operations in
duke@435 3285 // iterate_bytecodes_for_block()/ret() and we should not touch the
duke@435 3286 // iteration state. The calling activation of
duke@435 3287 // iterate_bytecodes_for_block will then complete normally.
duke@435 3288 if (cont->state() != NULL) {
duke@435 3289 if (!cont->is_set(BlockBegin::was_visited_flag)) {
duke@435 3290 // add continuation to work list instead of parsing it immediately
duke@435 3291 scope_data()->parent()->add_to_work_list(cont);
duke@435 3292 }
duke@435 3293 }
duke@435 3294
duke@435 3295 assert(jsr_continuation() == cont, "continuation must not have changed");
duke@435 3296 assert(!jsr_continuation()->is_set(BlockBegin::was_visited_flag) ||
duke@435 3297 jsr_continuation()->is_set(BlockBegin::parser_loop_header_flag),
duke@435 3298 "continuation can only be visited in case of backward branches");
duke@435 3299 assert(_last && _last->as_BlockEnd(), "block must have end");
duke@435 3300
duke@435 3301 // continuation is in work list, so end iteration of current block
duke@435 3302 _skip_block = true;
duke@435 3303 pop_scope_for_jsr();
duke@435 3304
duke@435 3305 return true;
duke@435 3306 }
duke@435 3307
duke@435 3308
duke@435 3309 // Inline the entry of a synchronized method as a monitor enter and
duke@435 3310 // register the exception handler which releases the monitor if an
duke@435 3311 // exception is thrown within the callee. Note that the monitor enter
duke@435 3312 // cannot throw an exception itself, because the receiver is
duke@435 3313 // guaranteed to be non-null by the explicit null check at the
duke@435 3314 // beginning of inlining.
duke@435 3315 void GraphBuilder::inline_sync_entry(Value lock, BlockBegin* sync_handler) {
duke@435 3316 assert(lock != NULL && sync_handler != NULL, "lock or handler missing");
duke@435 3317
duke@435 3318 monitorenter(lock, SynchronizationEntryBCI);
duke@435 3319 assert(_last->as_MonitorEnter() != NULL, "monitor enter expected");
duke@435 3320 _last->set_needs_null_check(false);
duke@435 3321
duke@435 3322 sync_handler->set(BlockBegin::exception_entry_flag);
duke@435 3323 sync_handler->set(BlockBegin::is_on_work_list_flag);
duke@435 3324
duke@435 3325 ciExceptionHandler* desc = new ciExceptionHandler(method()->holder(), 0, method()->code_size(), -1, 0);
duke@435 3326 XHandler* h = new XHandler(desc);
duke@435 3327 h->set_entry_block(sync_handler);
duke@435 3328 scope_data()->xhandlers()->append(h);
duke@435 3329 scope_data()->set_has_handler();
duke@435 3330 }
duke@435 3331
duke@435 3332
duke@435 3333 // If an exception is thrown and not handled within an inlined
duke@435 3334 // synchronized method, the monitor must be released before the
duke@435 3335 // exception is rethrown in the outer scope. Generate the appropriate
duke@435 3336 // instructions here.
duke@435 3337 void GraphBuilder::fill_sync_handler(Value lock, BlockBegin* sync_handler, bool default_handler) {
duke@435 3338 BlockBegin* orig_block = _block;
duke@435 3339 ValueStack* orig_state = _state;
duke@435 3340 Instruction* orig_last = _last;
duke@435 3341 _last = _block = sync_handler;
duke@435 3342 _state = sync_handler->state()->copy();
duke@435 3343
duke@435 3344 assert(sync_handler != NULL, "handler missing");
duke@435 3345 assert(!sync_handler->is_set(BlockBegin::was_visited_flag), "is visited here");
duke@435 3346
duke@435 3347 assert(lock != NULL || default_handler, "lock or handler missing");
duke@435 3348
duke@435 3349 XHandler* h = scope_data()->xhandlers()->remove_last();
duke@435 3350 assert(h->entry_block() == sync_handler, "corrupt list of handlers");
duke@435 3351
duke@435 3352 block()->set(BlockBegin::was_visited_flag);
duke@435 3353 Value exception = append_with_bci(new ExceptionObject(), SynchronizationEntryBCI);
duke@435 3354 assert(exception->is_pinned(), "must be");
duke@435 3355
never@2600 3356 int bci = SynchronizationEntryBCI;
never@2486 3357 if (compilation()->env()->dtrace_method_probes()) {
never@2600 3358 // Report exit from inlined methods. We don't have a stream here,
never@2600 3359 // so pass an explicit bci of SynchronizationEntryBCI.
never@2486 3360 Values* args = new Values(1);
never@2600 3361 args->push(append_with_bci(new Constant(new ObjectConstant(method())), bci));
never@2600 3362 append_with_bci(new RuntimeCall(voidType, "dtrace_method_exit", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), args), bci);
never@2486 3363 }
never@2486 3364
duke@435 3365 if (lock) {
duke@435 3366 assert(state()->locks_size() > 0 && state()->lock_at(state()->locks_size() - 1) == lock, "lock is missing");
roland@2174 3367 if (!lock->is_linked()) {
never@2600 3368 lock = append_with_bci(lock, bci);
duke@435 3369 }
duke@435 3370
duke@435 3371 // exit the monitor in the context of the synchronized method
never@2600 3372 monitorexit(lock, bci);
duke@435 3373
duke@435 3374 // exit the context of the synchronized method
duke@435 3375 if (!default_handler) {
duke@435 3376 pop_scope();
roland@2174 3377 bci = _state->caller_state()->bci();
roland@2174 3378 _state = _state->caller_state()->copy_for_parsing();
duke@435 3379 }
duke@435 3380 }
duke@435 3381
duke@435 3382 // perform the throw as if at the call site
duke@435 3383 apush(exception);
duke@435 3384 throw_op(bci);
duke@435 3385
duke@435 3386 BlockEnd* end = last()->as_BlockEnd();
duke@435 3387 block()->set_end(end);
duke@435 3388
duke@435 3389 _block = orig_block;
duke@435 3390 _state = orig_state;
duke@435 3391 _last = orig_last;
duke@435 3392 }
duke@435 3393
duke@435 3394
duke@435 3395 bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) {
duke@435 3396 assert(!callee->is_native(), "callee must not be native");
iveresov@2138 3397 if (count_backedges() && callee->has_loops()) {
iveresov@2138 3398 INLINE_BAILOUT("too complex for tiered");
iveresov@2138 3399 }
duke@435 3400 // first perform tests of things it's not possible to inline
duke@435 3401 if (callee->has_exception_handlers() &&
duke@435 3402 !InlineMethodsWithExceptionHandlers) INLINE_BAILOUT("callee has exception handlers");
duke@435 3403 if (callee->is_synchronized() &&
duke@435 3404 !InlineSynchronizedMethods ) INLINE_BAILOUT("callee is synchronized");
duke@435 3405 if (!callee->holder()->is_initialized()) INLINE_BAILOUT("callee's klass not initialized yet");
duke@435 3406 if (!callee->has_balanced_monitors()) INLINE_BAILOUT("callee's monitors do not match");
duke@435 3407
duke@435 3408 // Proper inlining of methods with jsrs requires a little more work.
duke@435 3409 if (callee->has_jsrs() ) INLINE_BAILOUT("jsrs not handled properly by inliner yet");
duke@435 3410
duke@435 3411 // now perform tests that are based on flag settings
duke@435 3412 if (inline_level() > MaxInlineLevel ) INLINE_BAILOUT("too-deep inlining");
duke@435 3413 if (recursive_inline_level(callee) > MaxRecursiveInlineLevel) INLINE_BAILOUT("too-deep recursive inlining");
duke@435 3414 if (callee->code_size() > max_inline_size() ) INLINE_BAILOUT("callee is too large");
duke@435 3415
duke@435 3416 // don't inline throwable methods unless the inlining tree is rooted in a throwable class
duke@435 3417 if (callee->name() == ciSymbol::object_initializer_name() &&
duke@435 3418 callee->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {
duke@435 3419 // Throwable constructor call
duke@435 3420 IRScope* top = scope();
duke@435 3421 while (top->caller() != NULL) {
duke@435 3422 top = top->caller();
duke@435 3423 }
duke@435 3424 if (!top->method()->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {
duke@435 3425 INLINE_BAILOUT("don't inline Throwable constructors");
duke@435 3426 }
duke@435 3427 }
duke@435 3428
duke@435 3429 // When SSE2 is used on intel, no special handling is needed
duke@435 3430 // for strictfp; because the enum-constant strict_fp_requires_explicit_rounding
duke@435 3431 // is fixed at compile time, the explicit check for UseSSE < 2 is needed here
duke@435 3432 if (strict_fp_requires_explicit_rounding && UseSSE < 2 && method()->is_strict() != callee->is_strict()) {
duke@435 3433 INLINE_BAILOUT("caller and callee have different strict fp requirements");
duke@435 3434 }
duke@435 3435
duke@435 3436 if (compilation()->env()->num_inlined_bytecodes() > DesiredMethodLimit) {
duke@435 3437 INLINE_BAILOUT("total inlining greater than DesiredMethodLimit");
duke@435 3438 }
duke@435 3439
iveresov@2349 3440 if (is_profiling() && !callee->ensure_method_data()) {
iveresov@2349 3441 INLINE_BAILOUT("mdo allocation failed");
iveresov@2349 3442 }
duke@435 3443 #ifndef PRODUCT
duke@435 3444 // printing
duke@435 3445 if (PrintInlining) {
duke@435 3446 print_inline_result(callee, true);
duke@435 3447 }
duke@435 3448 #endif
duke@435 3449
duke@435 3450 // NOTE: Bailouts from this point on, which occur at the
duke@435 3451 // GraphBuilder level, do not cause bailout just of the inlining but
duke@435 3452 // in fact of the entire compilation.
duke@435 3453
duke@435 3454 BlockBegin* orig_block = block();
duke@435 3455
duke@435 3456 const int args_base = state()->stack_size() - callee->arg_size();
duke@435 3457 assert(args_base >= 0, "stack underflow during inlining");
duke@435 3458
duke@435 3459 // Insert null check if necessary
duke@435 3460 Value recv = NULL;
duke@435 3461 if (code() != Bytecodes::_invokestatic) {
duke@435 3462 // note: null check must happen even if first instruction of callee does
duke@435 3463 // an implicit null check since the callee is in a different scope
duke@435 3464 // and we must make sure exception handling does the right thing
duke@435 3465 assert(!callee->is_static(), "callee must not be static");
duke@435 3466 assert(callee->arg_size() > 0, "must have at least a receiver");
duke@435 3467 recv = state()->stack_at(args_base);
duke@435 3468 null_check(recv);
duke@435 3469 }
duke@435 3470
iveresov@2138 3471 if (is_profiling()) {
iveresov@2138 3472 // Note that we'd collect profile data in this method if we wanted it.
iveresov@2138 3473 // this may be redundant here...
iveresov@2138 3474 compilation()->set_would_profile(true);
iveresov@2138 3475
iveresov@2138 3476 if (profile_calls()) {
iveresov@2138 3477 profile_call(recv, holder_known ? callee->holder() : NULL);
iveresov@2138 3478 }
iveresov@2138 3479 if (profile_inlined_calls()) {
iveresov@2180 3480 profile_invocation(callee, copy_state_before());
iveresov@2138 3481 }
duke@435 3482 }
duke@435 3483
duke@435 3484 // Introduce a new callee continuation point - if the callee has
duke@435 3485 // more than one return instruction or the return does not allow
duke@435 3486 // fall-through of control flow, all return instructions of the
duke@435 3487 // callee will need to be replaced by Goto's pointing to this
duke@435 3488 // continuation point.
duke@435 3489 BlockBegin* cont = block_at(next_bci());
duke@435 3490 bool continuation_existed = true;
duke@435 3491 if (cont == NULL) {
duke@435 3492 cont = new BlockBegin(next_bci());
duke@435 3493 // low number so that continuation gets parsed as early as possible
duke@435 3494 cont->set_depth_first_number(0);
duke@435 3495 #ifndef PRODUCT
duke@435 3496 if (PrintInitialBlockList) {
duke@435 3497 tty->print_cr("CFG: created block %d (bci %d) as continuation for inline at bci %d",
duke@435 3498 cont->block_id(), cont->bci(), bci());
duke@435 3499 }
duke@435 3500 #endif
duke@435 3501 continuation_existed = false;
duke@435 3502 }
duke@435 3503 // Record number of predecessors of continuation block before
duke@435 3504 // inlining, to detect if inlined method has edges to its
duke@435 3505 // continuation after inlining.
duke@435 3506 int continuation_preds = cont->number_of_preds();
duke@435 3507
duke@435 3508 // Push callee scope
duke@435 3509 push_scope(callee, cont);
duke@435 3510
duke@435 3511 // the BlockListBuilder for the callee could have bailed out
duke@435 3512 CHECK_BAILOUT_(false);
duke@435 3513
duke@435 3514 // Temporarily set up bytecode stream so we can append instructions
duke@435 3515 // (only using the bci of this stream)
duke@435 3516 scope_data()->set_stream(scope_data()->parent()->stream());
duke@435 3517
duke@435 3518 // Pass parameters into callee state: add assignments
duke@435 3519 // note: this will also ensure that all arguments are computed before being passed
duke@435 3520 ValueStack* callee_state = state();
roland@2174 3521 ValueStack* caller_state = state()->caller_state();
duke@435 3522 { int i = args_base;
duke@435 3523 while (i < caller_state->stack_size()) {
duke@435 3524 const int par_no = i - args_base;
duke@435 3525 Value arg = caller_state->stack_at_inc(i);
duke@435 3526 // NOTE: take base() of arg->type() to avoid problems storing
duke@435 3527 // constants
duke@435 3528 store_local(callee_state, arg, arg->type()->base(), par_no);
duke@435 3529 }
duke@435 3530 }
duke@435 3531
duke@435 3532 // Remove args from stack.
duke@435 3533 // Note that we preserve locals state in case we can use it later
duke@435 3534 // (see use of pop_scope() below)
duke@435 3535 caller_state->truncate_stack(args_base);
roland@2174 3536 assert(callee_state->stack_size() == 0, "callee stack must be empty");
duke@435 3537
duke@435 3538 Value lock;
duke@435 3539 BlockBegin* sync_handler;
duke@435 3540
duke@435 3541 // Inline the locking of the receiver if the callee is synchronized
duke@435 3542 if (callee->is_synchronized()) {
duke@435 3543 lock = callee->is_static() ? append(new Constant(new InstanceConstant(callee->holder()->java_mirror())))
duke@435 3544 : state()->local_at(0);
roland@2174 3545 sync_handler = new BlockBegin(SynchronizationEntryBCI);
duke@435 3546 inline_sync_entry(lock, sync_handler);
duke@435 3547 }
duke@435 3548
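  // Inlining bypasses the normal method prologue, so when dtrace probes
  // are enabled emit an explicit method-entry probe for the callee here.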
never@2486 3549 if (compilation()->env()->dtrace_method_probes()) {
never@2486 3550 Values* args = new Values(1);
never@2486 3551 args->push(append(new Constant(new ObjectConstant(method()))));
never@2486 3552 append(new RuntimeCall(voidType, "dtrace_method_entry", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), args));
never@2486 3553 }
duke@435 3554
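  // A non-NULL block at bci 0 means push_scope() kept it because it is
  // a parser loop header, so we cannot simply keep appending into the
  // current block; jump to it explicitly and merge the callee state.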
duke@435 3555 BlockBegin* callee_start_block = block_at(0);
duke@435 3556 if (callee_start_block != NULL) {
duke@435 3557 assert(callee_start_block->is_set(BlockBegin::parser_loop_header_flag), "must be loop header");
duke@435 3558 Goto* goto_callee = new Goto(callee_start_block, false);
duke@435 3559 // The state for this goto is in the scope of the callee, so use
duke@435 3560 // the entry bci for the callee instead of the call site bci.
duke@435 3561 append_with_bci(goto_callee, 0);
duke@435 3562 _block->set_end(goto_callee);
duke@435 3563 callee_start_block->merge(callee_state);
duke@435 3564
duke@435 3565 _last = _block = callee_start_block;
duke@435 3566
duke@435 3567 scope_data()->add_to_work_list(callee_start_block);
duke@435 3568 }
duke@435 3569
duke@435 3570 // Clear out bytecode stream
duke@435 3571 scope_data()->set_stream(NULL);
duke@435 3572
duke@435 3573 // Ready to resume parsing in callee (either in the same block we
duke@435 3574 // were in before or in the callee's start block)
duke@435 3575 iterate_all_blocks(callee_start_block == NULL);
duke@435 3576
duke@435 3577 // If we bailed out during parsing, return immediately (this is bad news)
duke@435 3578 if (bailed_out()) return false;
duke@435 3579
duke@435 3580 // iterate_all_blocks theoretically traverses in random order; in
duke@435 3581 // practice, we have only traversed the continuation if we are
duke@435 3582 // inlining into a subroutine
duke@435 3583 assert(continuation_existed ||
duke@435 3584 !continuation()->is_set(BlockBegin::was_visited_flag),
duke@435 3585 "continuation should not have been parsed yet if we created it");
duke@435 3586
duke@435 3587 // If we bailed out during parsing, return immediately (this is bad news)
duke@435 3588 CHECK_BAILOUT_(false);
duke@435 3589
duke@435 3590 // At this point we are almost ready to return and resume parsing of
duke@435 3591 // the caller back in the GraphBuilder. The only thing we want to do
duke@435 3592 // first is an optimization: during parsing of the callee we
duke@435 3593 // generated at least one Goto to the continuation block. If we
duke@435 3594 // generated exactly one, and if the inlined method spanned exactly
duke@435 3595 // one block (and we didn't have to Goto its entry), then we snip
duke@435 3596 // off the Goto to the continuation, allowing control to fall
duke@435 3597 // through back into the caller block and effectively performing
duke@435 3598 // block merging. This allows load elimination and CSE to take place
duke@435 3599 // across multiple callee scopes if they are relatively simple, and
duke@435 3600 // is currently essential to making inlining profitable.
duke@435 3601 if ( num_returns() == 1
duke@435 3602 && block() == orig_block
duke@435 3603 && block() == inline_cleanup_block()) {
duke@435 3604 _last = inline_cleanup_return_prev();
roland@2174 3605 _state = inline_cleanup_state();
duke@435 3606 } else if (continuation_preds == cont->number_of_preds()) {
duke@435 3607 // Inlining has made the instructions after the invoke in the
duke@435 3608 // caller unreachable, so skip filling this block with
duke@435 3609 // instructions!
duke@435 3610 assert (cont == continuation(), "");
duke@435 3611 assert(_last && _last->as_BlockEnd(), "");
duke@435 3612 _skip_block = true;
duke@435 3613 } else {
duke@435 3614 // Resume parsing in continuation block unless it was already parsed.
duke@435 3615 // Note that if we don't change _last here, iteration in
duke@435 3616 // iterate_bytecodes_for_block will stop when we return.
duke@435 3617 if (!continuation()->is_set(BlockBegin::was_visited_flag)) {
duke@435 3618 // add continuation to work list instead of parsing it immediately
duke@435 3619 assert(_last && _last->as_BlockEnd(), "");
duke@435 3620 scope_data()->parent()->add_to_work_list(continuation());
duke@435 3621 _skip_block = true;
duke@435 3622 }
duke@435 3623 }
duke@435 3624
duke@435 3625 // Fill the exception handler for synchronized methods with instructions
duke@435 3626 if (callee->is_synchronized() && sync_handler->state() != NULL) {
duke@435 3627 fill_sync_handler(lock, sync_handler);
duke@435 3628 } else {
duke@435 3629 pop_scope();
duke@435 3630 }
duke@435 3631
duke@435 3632 compilation()->notice_inlined_method(callee);
duke@435 3633
duke@435 3634 return true;
duke@435 3635 }
duke@435 3636
duke@435 3637
duke@435 3638 void GraphBuilder::inline_bailout(const char* msg) {
duke@435 3639 assert(msg != NULL, "inline bailout msg must exist");
duke@435 3640 _inline_bailout_msg = msg;
duke@435 3641 }
duke@435 3642
duke@435 3643
duke@435 3644 void GraphBuilder::clear_inline_bailout() {
duke@435 3645 _inline_bailout_msg = NULL;
duke@435 3646 }
duke@435 3647
duke@435 3648
duke@435 3649 void GraphBuilder::push_root_scope(IRScope* scope, BlockList* bci2block, BlockBegin* start) {
duke@435 3650 ScopeData* data = new ScopeData(NULL);
duke@435 3651 data->set_scope(scope);
duke@435 3652 data->set_bci2block(bci2block);
duke@435 3653 _scope_data = data;
duke@435 3654 _block = start;
duke@435 3655 }
duke@435 3656
duke@435 3657
duke@435 3658 void GraphBuilder::push_scope(ciMethod* callee, BlockBegin* continuation) {
duke@435 3659 IRScope* callee_scope = new IRScope(compilation(), scope(), bci(), callee, -1, false);
duke@435 3660 scope()->add_callee(callee_scope);
duke@435 3661
duke@435 3662 BlockListBuilder blb(compilation(), callee_scope, -1);
duke@435 3663 CHECK_BAILOUT();
duke@435 3664
duke@435 3665 if (!blb.bci2block()->at(0)->is_set(BlockBegin::parser_loop_header_flag)) {
duke@435 3666 // this scope can be inlined directly into the caller so remove
duke@435 3667 // the block at bci 0.
duke@435 3668 blb.bci2block()->at_put(0, NULL);
duke@435 3669 }
duke@435 3670
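  // Set up the callee's value stack; its caller state is a copy of the
  // current state taken at the call-site bci.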
roland@2174 3671 set_state(new ValueStack(callee_scope, state()->copy(ValueStack::CallerState, bci())));
duke@435 3672
duke@435 3673 ScopeData* data = new ScopeData(scope_data());
duke@435 3674 data->set_scope(callee_scope);
duke@435 3675 data->set_bci2block(blb.bci2block());
duke@435 3676 data->set_continuation(continuation);
duke@435 3677 _scope_data = data;
duke@435 3678 }
duke@435 3679
duke@435 3680
duke@435 3681 void GraphBuilder::push_scope_for_jsr(BlockBegin* jsr_continuation, int jsr_dest_bci) {
duke@435 3682 ScopeData* data = new ScopeData(scope_data());
duke@435 3683 data->set_parsing_jsr();
duke@435 3684 data->set_jsr_entry_bci(jsr_dest_bci);
duke@435 3685 data->set_jsr_return_address_local(-1);
duke@435 3686 // Must clone bci2block list as we will be mutating it in order to
duke@435 3687 // properly clone all blocks in jsr region as well as exception
duke@435 3688 // handlers containing rets
duke@435 3689 BlockList* new_bci2block = new BlockList(bci2block()->length());
duke@435 3690 new_bci2block->push_all(bci2block());
duke@435 3691 data->set_bci2block(new_bci2block);
duke@435 3692 data->set_scope(scope());
duke@435 3693 data->setup_jsr_xhandlers();
duke@435 3694 data->set_continuation(continuation());
duke@435 3695 data->set_jsr_continuation(jsr_continuation);
duke@435 3696 _scope_data = data;
duke@435 3697 }
duke@435 3698
duke@435 3699
duke@435 3700 void GraphBuilder::pop_scope() {
duke@435 3701 int number_of_locks = scope()->number_of_locks();
duke@435 3702 _scope_data = scope_data()->parent();
duke@435 3703 // accumulate minimum number of monitor slots to be reserved
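  // (after the pop, scope() refers to the caller, which must reserve at
  // least as many monitor slots as its callee used)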
duke@435 3704 scope()->set_min_number_of_locks(number_of_locks);
duke@435 3705 }
duke@435 3706
duke@435 3707
duke@435 3708 void GraphBuilder::pop_scope_for_jsr() {
duke@435 3709 _scope_data = scope_data()->parent();
duke@435 3710 }
duke@435 3711
duke@435 3712 bool GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {
duke@435 3713 if (InlineUnsafeOps) {
duke@435 3714 Values* args = state()->pop_arguments(callee->arg_size());
duke@435 3715 null_check(args->at(0));
duke@435 3716 Instruction* offset = args->at(2);
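    // Unsafe offsets are longs; on 32-bit platforms narrow them to ints
    // before use in address arithmetic (the put/prefetch/CAS appenders
    // below apply the same conversion).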
duke@435 3717 #ifndef _LP64
duke@435 3718 offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
duke@435 3719 #endif
duke@435 3720 Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
duke@435 3721 push(op->type(), op);
duke@435 3722 compilation()->set_has_unsafe_access(true);
duke@435 3723 }
duke@435 3724 return InlineUnsafeOps;
duke@435 3725 }
duke@435 3726
duke@435 3727
duke@435 3728 bool GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {
duke@435 3729 if (InlineUnsafeOps) {
duke@435 3730 Values* args = state()->pop_arguments(callee->arg_size());
duke@435 3731 null_check(args->at(0));
duke@435 3732 Instruction* offset = args->at(2);
duke@435 3733 #ifndef _LP64
duke@435 3734 offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
duke@435 3735 #endif
duke@435 3736 Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, args->at(3), is_volatile));
duke@435 3737 compilation()->set_has_unsafe_access(true);
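    // An unsafe store may alias any field, so kill the local value map
    // to keep CSE from reusing loads across the store.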
duke@435 3738 kill_all();
duke@435 3739 }
duke@435 3740 return InlineUnsafeOps;
duke@435 3741 }
duke@435 3742
duke@435 3743
duke@435 3744 bool GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {
duke@435 3745 if (InlineUnsafeOps) {
duke@435 3746 Values* args = state()->pop_arguments(callee->arg_size());
duke@435 3747 null_check(args->at(0));
duke@435 3748 Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
duke@435 3749 push(op->type(), op);
duke@435 3750 compilation()->set_has_unsafe_access(true);
duke@435 3751 }
duke@435 3752 return InlineUnsafeOps;
duke@435 3753 }
duke@435 3754
duke@435 3755
duke@435 3756 bool GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
duke@435 3757 if (InlineUnsafeOps) {
duke@435 3758 Values* args = state()->pop_arguments(callee->arg_size());
duke@435 3759 null_check(args->at(0));
duke@435 3760 Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
duke@435 3761 compilation()->set_has_unsafe_access(true);
duke@435 3762 }
duke@435 3763 return InlineUnsafeOps;
duke@435 3764 }
duke@435 3765
duke@435 3766
duke@435 3767 bool GraphBuilder::append_unsafe_prefetch(ciMethod* callee, bool is_static, bool is_store) {
duke@435 3768 if (InlineUnsafeOps) {
duke@435 3769 Values* args = state()->pop_arguments(callee->arg_size());
duke@435 3770 int obj_arg_index = 1; // Assume non-static case
duke@435 3771 if (is_static) {
duke@435 3772 obj_arg_index = 0;
duke@435 3773 } else {
duke@435 3774 null_check(args->at(0));
duke@435 3775 }
duke@435 3776 Instruction* offset = args->at(obj_arg_index + 1);
duke@435 3777 #ifndef _LP64
duke@435 3778 offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
duke@435 3779 #endif
duke@435 3780 Instruction* op = is_store ? append(new UnsafePrefetchWrite(args->at(obj_arg_index), offset))
duke@435 3781 : append(new UnsafePrefetchRead (args->at(obj_arg_index), offset));
duke@435 3782 compilation()->set_has_unsafe_access(true);
duke@435 3783 }
duke@435 3784 return InlineUnsafeOps;
duke@435 3785 }
duke@435 3786
duke@435 3787
duke@435 3788 void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
roland@2174 3789 ValueStack* state_before = copy_state_for_exception();
duke@435 3790 ValueType* result_type = as_ValueType(callee->return_type());
duke@435 3791 assert(result_type->is_int(), "int result");
duke@435 3792 Values* args = state()->pop_arguments(callee->arg_size());
duke@435 3793
duke@435 3794 // Pop off some args to handle specially, then push them back
duke@435 3795 Value newval = args->pop();
duke@435 3796 Value cmpval = args->pop();
duke@435 3797 Value offset = args->pop();
duke@435 3798 Value src = args->pop();
duke@435 3799 Value unsafe_obj = args->pop();
duke@435 3800
duke@435 3801 // Separately handle the unsafe arg. It is not needed for code
duke@435 3802 // generation, but must be null checked
duke@435 3803 null_check(unsafe_obj);
duke@435 3804
duke@435 3805 #ifndef _LP64
duke@435 3806 offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
duke@435 3807 #endif
duke@435 3808
duke@435 3809 args->push(src);
duke@435 3810 args->push(offset);
duke@435 3811 args->push(cmpval);
duke@435 3812 args->push(newval);
duke@435 3813
duke@435 3814 // An unsafe CAS can alias with other field accesses, but we don't
duke@435 3815 // know which ones, so mark the state as not preserved. This will
duke@435 3816 // cause CSE to invalidate memory across it.
duke@435 3817 bool preserves_state = false;
roland@2174 3818 Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(), args, false, state_before, preserves_state);
duke@435 3819 append_split(result);
duke@435 3820 push(result_type, result);
duke@435 3821 compilation()->set_has_unsafe_access(true);
duke@435 3822 }
duke@435 3823
duke@435 3824
duke@435 3825 #ifndef PRODUCT
duke@435 3826 void GraphBuilder::print_inline_result(ciMethod* callee, bool res) {
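duke@435      // one-character attribute flags: s = synchronized,
duke@435      // ! = has exception handlers, m = contains monitor bytecodes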
duke@435 3827 const char sync_char = callee->is_synchronized() ? 's' : ' ';
duke@435 3828 const char exception_char = callee->has_exception_handlers() ? '!' : ' ';
duke@435 3829 const char monitors_char = callee->has_monitor_bytecodes() ? 'm' : ' ';
duke@435 3830 tty->print(" %c%c%c ", sync_char, exception_char, monitors_char);
duke@435 3831 for (int i = 0; i < scope()->level(); i++) tty->print(" ");
duke@435 3832 if (res) {
duke@435 3833 tty->print(" ");
duke@435 3834 } else {
duke@435 3835 tty->print("- ");
duke@435 3836 }
duke@435 3837 tty->print("@ %d ", bci());
duke@435 3838 callee->print_short_name();
duke@435 3839 tty->print(" (%d bytes)", callee->code_size());
duke@435 3840 if (_inline_bailout_msg) {
duke@435 3841 tty->print(" %s", _inline_bailout_msg);
duke@435 3842 }
duke@435 3843 tty->cr();
duke@435 3844
duke@435 3845 if (res && CIPrintMethodCodes) {
duke@435 3846 callee->print_codes();
duke@435 3847 }
duke@435 3848 }
duke@435 3849
duke@435 3850
duke@435 3851 void GraphBuilder::print_stats() {
duke@435 3852 vmap()->print();
duke@435 3853 }
duke@435 3854 #endif // PRODUCT
duke@435 3855
duke@435 3856 void GraphBuilder::profile_call(Value recv, ciKlass* known_holder) {
duke@435 3857 append(new ProfileCall(method(), bci(), recv, known_holder));
duke@435 3858 }
duke@435 3859
iveresov@2180 3860 void GraphBuilder::profile_invocation(ciMethod* callee, ValueStack* state) {
iveresov@2180 3861 append(new ProfileInvoke(callee, state));
duke@435 3862 }
