src/share/vm/c1/c1_GraphBuilder.cpp

author:      twisti
date:        Wed, 31 Aug 2011 01:40:45 -0700
changeset:   3097:de847cac9235
parent:      3042:ce3e1d4dc416
child:       3100:a32de5085326
permissions: -rw-r--r--

7078382: JSR 292: don't count method handle adapters against inlining budgets
Reviewed-by: kvn, never

/*
 * Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "c1/c1_CFGPrinter.hpp"
#include "c1/c1_Canonicalizer.hpp"
#include "c1/c1_Compilation.hpp"
#include "c1/c1_GraphBuilder.hpp"
#include "c1/c1_InstructionPrinter.hpp"
#include "ci/ciField.hpp"
#include "ci/ciKlass.hpp"
#include "compiler/compileBroker.hpp"
#include "interpreter/bytecode.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/compilationPolicy.hpp"
#include "utilities/bitMap.inline.hpp"

class BlockListBuilder VALUE_OBJ_CLASS_SPEC {
 private:
  Compilation* _compilation;
  IRScope*     _scope;

  BlockList    _blocks;                // internal list of all blocks
  BlockList*   _bci2block;             // mapping from bci to blocks for GraphBuilder

  // fields used by mark_loops
  BitMap       _active;                // for iteration of control flow graph
  BitMap       _visited;               // for iteration of control flow graph
  intArray     _loop_map;              // caches the information if a block is contained in a loop
  int          _next_loop_index;       // next free loop number
  int          _next_block_number;     // for reverse postorder numbering of blocks

  // accessors
  Compilation*  compilation() const              { return _compilation; }
  IRScope*      scope() const                    { return _scope; }
  ciMethod*     method() const                   { return scope()->method(); }
  XHandlers*    xhandlers() const                { return scope()->xhandlers(); }

  // unified bailout support
  void          bailout(const char* msg) const   { compilation()->bailout(msg); }
  bool          bailed_out() const               { return compilation()->bailed_out(); }

  // helper functions
  BlockBegin* make_block_at(int bci, BlockBegin* predecessor);
  void handle_exceptions(BlockBegin* current, int cur_bci);
  void handle_jsr(BlockBegin* current, int sr_bci, int next_bci);
  void store_one(BlockBegin* current, int local);
  void store_two(BlockBegin* current, int local);
  void set_entries(int osr_bci);
  void set_leaders();

  void make_loop_header(BlockBegin* block);
  void mark_loops();
  int  mark_loops(BlockBegin* b, bool in_subroutine);

  // debugging
#ifndef PRODUCT
  void print();
#endif

 public:
  // creation
  BlockListBuilder(Compilation* compilation, IRScope* scope, int osr_bci);

  // accessors for GraphBuilder
  BlockList*    bci2block() const                { return _bci2block; }
};


// Implementation of BlockListBuilder

BlockListBuilder::BlockListBuilder(Compilation* compilation, IRScope* scope, int osr_bci)
 : _compilation(compilation)
 , _scope(scope)
 , _blocks(16)
 , _bci2block(new BlockList(scope->method()->code_size(), NULL))
 , _next_block_number(0)
 , _active()         // size not known yet
 , _visited()        // size not known yet
 , _next_loop_index(0)
 , _loop_map() // size not known yet
{
  set_entries(osr_bci);
  set_leaders();
  CHECK_BAILOUT();

  mark_loops();
  NOT_PRODUCT(if (PrintInitialBlockList) print());

#ifndef PRODUCT
  if (PrintCFGToFile) {
    stringStream title;
    title.print("BlockListBuilder ");
    scope->method()->print_name(&title);
    CFGPrinter::print_cfg(_bci2block, title.as_string(), false, false);
  }
#endif
}


void BlockListBuilder::set_entries(int osr_bci) {
  // generate start blocks
  BlockBegin* std_entry = make_block_at(0, NULL);
  if (scope()->caller() == NULL) {
    std_entry->set(BlockBegin::std_entry_flag);
  }
  if (osr_bci != -1) {
    BlockBegin* osr_entry = make_block_at(osr_bci, NULL);
    osr_entry->set(BlockBegin::osr_entry_flag);
  }

  // generate exception entry blocks
  XHandlers* list = xhandlers();
  const int n = list->length();
  for (int i = 0; i < n; i++) {
    XHandler* h = list->handler_at(i);
    BlockBegin* entry = make_block_at(h->handler_bci(), NULL);
    entry->set(BlockBegin::exception_entry_flag);
    h->set_entry_block(entry);
  }
}


BlockBegin* BlockListBuilder::make_block_at(int cur_bci, BlockBegin* predecessor) {
  assert(method()->bci_block_start().at(cur_bci), "wrong block starts of MethodLivenessAnalyzer");

  BlockBegin* block = _bci2block->at(cur_bci);
  if (block == NULL) {
    block = new BlockBegin(cur_bci);
    block->init_stores_to_locals(method()->max_locals());
    _bci2block->at_put(cur_bci, block);
    _blocks.append(block);

    assert(predecessor == NULL || predecessor->bci() < cur_bci, "targets for backward branches must already exist");
  }

  if (predecessor != NULL) {
    if (block->is_set(BlockBegin::exception_entry_flag)) {
      BAILOUT_("Exception handler can be reached by both normal and exceptional control flow", block);
    }

    predecessor->add_successor(block);
    block->increment_total_preds();
  }

  return block;
}


inline void BlockListBuilder::store_one(BlockBegin* current, int local) {
  current->stores_to_locals().set_bit(local);
}
inline void BlockListBuilder::store_two(BlockBegin* current, int local) {
  store_one(current, local);
  store_one(current, local + 1);
}
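// Example: an _lstore bytecode with index 3 calls store_two(current, 3) and so
// marks locals 3 and 4 as stored, since longs and doubles occupy two local slots.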


void BlockListBuilder::handle_exceptions(BlockBegin* current, int cur_bci) {
  // Draws edges from a block to its exception handlers
  XHandlers* list = xhandlers();
  const int n = list->length();

  for (int i = 0; i < n; i++) {
    XHandler* h = list->handler_at(i);

    if (h->covers(cur_bci)) {
      BlockBegin* entry = h->entry_block();
      assert(entry != NULL && entry == _bci2block->at(h->handler_bci()), "entry must be set");
      assert(entry->is_set(BlockBegin::exception_entry_flag), "flag must be set");

      // add each exception handler only once
      if (!current->is_successor(entry)) {
        current->add_successor(entry);
        entry->increment_total_preds();
      }

      // stop when reaching catchall
      if (h->catch_type() == 0) break;
    }
  }
}


void BlockListBuilder::handle_jsr(BlockBegin* current, int sr_bci, int next_bci) {
  // start a new block after the jsr bytecode and link this block into the cfg
  make_block_at(next_bci, current);

  // start a new block at the subroutine entry and mark it with a special flag
  BlockBegin* sr_block = make_block_at(sr_bci, current);
  if (!sr_block->is_set(BlockBegin::subroutine_entry_flag)) {
    sr_block->set(BlockBegin::subroutine_entry_flag);
  }
}


void BlockListBuilder::set_leaders() {
  bool has_xhandlers = xhandlers()->has_handlers();
  BlockBegin* current = NULL;

  // Knowing in advance which bcis start a new block simplifies the analysis.
  // Without it, backward branches could jump to a bci where no block was created
  // during bytecode iteration. This would require the creation of a new block at the
  // branch target and a modification of the successor lists.
  BitMap bci_block_start = method()->bci_block_start();

  ciBytecodeStream s(method());
  while (s.next() != ciBytecodeStream::EOBC()) {
    int cur_bci = s.cur_bci();

    if (bci_block_start.at(cur_bci)) {
      current = make_block_at(cur_bci, current);
    }
    assert(current != NULL, "must have current block");

    if (has_xhandlers && GraphBuilder::can_trap(method(), s.cur_bc())) {
      handle_exceptions(current, cur_bci);
    }

    switch (s.cur_bc()) {
      // track stores to local variables for selective creation of phi functions
      case Bytecodes::_iinc:     store_one(current, s.get_index()); break;
      case Bytecodes::_istore:   store_one(current, s.get_index()); break;
      case Bytecodes::_lstore:   store_two(current, s.get_index()); break;
      case Bytecodes::_fstore:   store_one(current, s.get_index()); break;
      case Bytecodes::_dstore:   store_two(current, s.get_index()); break;
      case Bytecodes::_astore:   store_one(current, s.get_index()); break;
      case Bytecodes::_istore_0: store_one(current, 0); break;
      case Bytecodes::_istore_1: store_one(current, 1); break;
      case Bytecodes::_istore_2: store_one(current, 2); break;
      case Bytecodes::_istore_3: store_one(current, 3); break;
      case Bytecodes::_lstore_0: store_two(current, 0); break;
      case Bytecodes::_lstore_1: store_two(current, 1); break;
      case Bytecodes::_lstore_2: store_two(current, 2); break;
      case Bytecodes::_lstore_3: store_two(current, 3); break;
      case Bytecodes::_fstore_0: store_one(current, 0); break;
      case Bytecodes::_fstore_1: store_one(current, 1); break;
      case Bytecodes::_fstore_2: store_one(current, 2); break;
      case Bytecodes::_fstore_3: store_one(current, 3); break;
      case Bytecodes::_dstore_0: store_two(current, 0); break;
      case Bytecodes::_dstore_1: store_two(current, 1); break;
      case Bytecodes::_dstore_2: store_two(current, 2); break;
      case Bytecodes::_dstore_3: store_two(current, 3); break;
      case Bytecodes::_astore_0: store_one(current, 0); break;
      case Bytecodes::_astore_1: store_one(current, 1); break;
      case Bytecodes::_astore_2: store_one(current, 2); break;
      case Bytecodes::_astore_3: store_one(current, 3); break;

      // track bytecodes that affect the control flow
      case Bytecodes::_athrow:  // fall through
      case Bytecodes::_ret:     // fall through
      case Bytecodes::_ireturn: // fall through
      case Bytecodes::_lreturn: // fall through
      case Bytecodes::_freturn: // fall through
      case Bytecodes::_dreturn: // fall through
      case Bytecodes::_areturn: // fall through
      case Bytecodes::_return:
        current = NULL;
        break;

      case Bytecodes::_ifeq:      // fall through
      case Bytecodes::_ifne:      // fall through
      case Bytecodes::_iflt:      // fall through
      case Bytecodes::_ifge:      // fall through
      case Bytecodes::_ifgt:      // fall through
      case Bytecodes::_ifle:      // fall through
      case Bytecodes::_if_icmpeq: // fall through
      case Bytecodes::_if_icmpne: // fall through
      case Bytecodes::_if_icmplt: // fall through
      case Bytecodes::_if_icmpge: // fall through
      case Bytecodes::_if_icmpgt: // fall through
      case Bytecodes::_if_icmple: // fall through
      case Bytecodes::_if_acmpeq: // fall through
      case Bytecodes::_if_acmpne: // fall through
      case Bytecodes::_ifnull:    // fall through
      case Bytecodes::_ifnonnull:
        make_block_at(s.next_bci(), current);
        make_block_at(s.get_dest(), current);
        current = NULL;
        break;

      case Bytecodes::_goto:
        make_block_at(s.get_dest(), current);
        current = NULL;
        break;

      case Bytecodes::_goto_w:
        make_block_at(s.get_far_dest(), current);
        current = NULL;
        break;

      case Bytecodes::_jsr:
        handle_jsr(current, s.get_dest(), s.next_bci());
        current = NULL;
        break;

      case Bytecodes::_jsr_w:
        handle_jsr(current, s.get_far_dest(), s.next_bci());
        current = NULL;
        break;

      case Bytecodes::_tableswitch: {
        // set block for each case
        Bytecode_tableswitch sw(&s);
        int l = sw.length();
        for (int i = 0; i < l; i++) {
          make_block_at(cur_bci + sw.dest_offset_at(i), current);
        }
        make_block_at(cur_bci + sw.default_offset(), current);
        current = NULL;
        break;
      }

      case Bytecodes::_lookupswitch: {
        // set block for each case
        Bytecode_lookupswitch sw(&s);
        int l = sw.number_of_pairs();
        for (int i = 0; i < l; i++) {
          make_block_at(cur_bci + sw.pair_at(i).offset(), current);
        }
        make_block_at(cur_bci + sw.default_offset(), current);
        current = NULL;
        break;
      }
    }
  }
}


void BlockListBuilder::mark_loops() {
  ResourceMark rm;

  _active = BitMap(BlockBegin::number_of_blocks());         _active.clear();
  _visited = BitMap(BlockBegin::number_of_blocks());        _visited.clear();
  _loop_map = intArray(BlockBegin::number_of_blocks(), 0);
  _next_loop_index = 0;
  _next_block_number = _blocks.length();

  // recursively iterate the control flow graph
  mark_loops(_bci2block->at(0), false);
  assert(_next_block_number >= 0, "invalid block numbers");
}


void BlockListBuilder::make_loop_header(BlockBegin* block) {
  if (block->is_set(BlockBegin::exception_entry_flag)) {
    // exception edges may look like loops but don't mark them as such
    // since it screws up block ordering.
    return;
  }
  if (!block->is_set(BlockBegin::parser_loop_header_flag)) {
    block->set(BlockBegin::parser_loop_header_flag);

    assert(_loop_map.at(block->block_id()) == 0, "must not be set yet");
    assert(0 <= _next_loop_index && _next_loop_index < BitsPerInt, "_next_loop_index is used as a bit-index in integer");
    _loop_map.at_put(block->block_id(), 1 << _next_loop_index);
    if (_next_loop_index < 31) _next_loop_index++;
  } else {
    // block already marked as loop header
    assert(is_power_of_2((unsigned int)_loop_map.at(block->block_id())), "exactly one bit must be set");
  }
}
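// For illustration: the first loop header discovered gets bit 0 (value 1), the
// second bit 1 (value 2), and so on. From the 32nd header onward all headers
// share bit 31; that shared bit is never cleared below, so distinct loops
// beyond 31 are conservatively treated as one.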


int BlockListBuilder::mark_loops(BlockBegin* block, bool in_subroutine) {
  int block_id = block->block_id();

  if (_visited.at(block_id)) {
    if (_active.at(block_id)) {
      // reached block via backward branch
      make_loop_header(block);
    }
    // return cached loop information for this block
    return _loop_map.at(block_id);
  }

  if (block->is_set(BlockBegin::subroutine_entry_flag)) {
    in_subroutine = true;
  }

  // set active and visited bits before successors are processed
  _visited.set_bit(block_id);
  _active.set_bit(block_id);

  intptr_t loop_state = 0;
  for (int i = block->number_of_sux() - 1; i >= 0; i--) {
    // recursively process all successors
    loop_state |= mark_loops(block->sux_at(i), in_subroutine);
  }

  // clear active-bit after all successors are processed
  _active.clear_bit(block_id);

  // reverse-post-order numbering of all blocks
  block->set_depth_first_number(_next_block_number);
  _next_block_number--;

  if (loop_state != 0 || in_subroutine) {
    // block is contained at least in one loop, so phi functions are necessary
    // phi functions are also necessary for all locals stored in a subroutine
    scope()->requires_phi_function().set_union(block->stores_to_locals());
  }

  if (block->is_set(BlockBegin::parser_loop_header_flag)) {
    int header_loop_state = _loop_map.at(block_id);
    assert(is_power_of_2((unsigned)header_loop_state), "exactly one bit must be set");

    // If the highest bit is set (i.e. when integer value is negative), the method
    // has 32 or more loops. This bit is never cleared because it is used for multiple loops
    if (header_loop_state >= 0) {
      clear_bits(loop_state, header_loop_state);
    }
  }

  // cache and return loop information for this block
  _loop_map.at_put(block_id, loop_state);
  return loop_state;
}
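// For illustration: if a block lies inside loop #0, every block on the cyclic
// path caches a loop_state with bit 0 set, so their stored locals get phi
// functions. When the header of loop #0 finishes, it clears bit 0 from the
// state it returns, so predecessors outside the loop see no loop bit.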


#ifndef PRODUCT

int compare_depth_first(BlockBegin** a, BlockBegin** b) {
  return (*a)->depth_first_number() - (*b)->depth_first_number();
}

void BlockListBuilder::print() {
  tty->print("----- initial block list of BlockListBuilder for method ");
  method()->print_short_name();
  tty->cr();

  // better readability if blocks are sorted in processing order
  _blocks.sort(compare_depth_first);

  for (int i = 0; i < _blocks.length(); i++) {
    BlockBegin* cur = _blocks.at(i);
    tty->print("%4d: B%-4d bci: %-4d  preds: %-4d ", cur->depth_first_number(), cur->block_id(), cur->bci(), cur->total_preds());

    tty->print(cur->is_set(BlockBegin::std_entry_flag)               ? " std" : "    ");
    tty->print(cur->is_set(BlockBegin::osr_entry_flag)               ? " osr" : "    ");
    tty->print(cur->is_set(BlockBegin::exception_entry_flag)         ? " ex" : "   ");
    tty->print(cur->is_set(BlockBegin::subroutine_entry_flag)        ? " sr" : "   ");
    tty->print(cur->is_set(BlockBegin::parser_loop_header_flag)      ? " lh" : "   ");

    if (cur->number_of_sux() > 0) {
      tty->print("    sux: ");
      for (int j = 0; j < cur->number_of_sux(); j++) {
        BlockBegin* sux = cur->sux_at(j);
        tty->print("B%d ", sux->block_id());
      }
    }
    tty->cr();
  }
}

#endif


// A simple growable array of Values indexed by ciFields
class FieldBuffer: public CompilationResourceObj {
 private:
  GrowableArray<Value> _values;

 public:
  FieldBuffer() {}

  void kill() {
    _values.trunc_to(0);
  }

  Value at(ciField* field) {
    assert(field->holder()->is_loaded(), "must be a loaded field");
    int offset = field->offset();
    if (offset < _values.length()) {
      return _values.at(offset);
    } else {
      return NULL;
    }
  }

  void at_put(ciField* field, Value value) {
    assert(field->holder()->is_loaded(), "must be a loaded field");
    int offset = field->offset();
    _values.at_put_grow(offset, value, NULL);
  }

};


// MemoryBuffer is a fairly simple model of the current state of memory.
// It partitions memory into several pieces.  The first piece is
// generic memory where little is known about the owner of the memory.
// This is conceptually represented by the tuple <O, F, V> which says
// that the field F of object O has value V.  This is flattened so
// that F is represented by the offset of the field and the parallel
// arrays _objects and _values are used for O and V.  Loads of O.F can
// simply use V.  Newly allocated objects are kept in a separate list
// along with a parallel array for each object which represents the
// current value of its fields.  Stores of the default value to fields
// which have never been stored to before are eliminated since they
// are redundant.  Once newly allocated objects are stored into
// another object or they are passed out of the current compile they
// are treated like generic memory.
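//
// Illustration (hypothetical Java source, not from this file):
//
//   Point p = new Point();  // p is tracked in _newobjects
//   p.x = 0;                // store of a default value to a fresh field -> eliminated
//   p.y = 5;                // recorded in p's FieldBuffer
//   int a = p.y;            // load is folded to the recorded value 5
//   q.next = p;             // p escapes -> from here on p is generic memory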

class MemoryBuffer: public CompilationResourceObj {
 private:
  FieldBuffer                 _values;
  GrowableArray<Value>        _objects;
  GrowableArray<Value>        _newobjects;
  GrowableArray<FieldBuffer*> _fields;

 public:
  MemoryBuffer() {}

  StoreField* store(StoreField* st) {
    if (!EliminateFieldAccess) {
      return st;
    }

    Value object = st->obj();
    Value value = st->value();
    ciField* field = st->field();
    if (field->holder()->is_loaded()) {
      int offset = field->offset();
      int index = _newobjects.find(object);
      if (index != -1) {
        // newly allocated object with no other stores performed on this field
        FieldBuffer* buf = _fields.at(index);
        if (buf->at(field) == NULL && is_default_value(value)) {
#ifndef PRODUCT
          if (PrintIRDuringConstruction && Verbose) {
            tty->print_cr("Eliminated store for object %d:", index);
            st->print_line();
          }
#endif
          return NULL;
        } else {
          buf->at_put(field, value);
        }
      } else {
        _objects.at_put_grow(offset, object, NULL);
        _values.at_put(field, value);
      }

      store_value(value);
    } else {
      // if we held onto field names we could alias based on names but
      // we don't know what's being stored to so kill it all.
      kill();
    }
    return st;
  }


  // return true if this value corresponds to the default value of a field.
  bool is_default_value(Value value) {
    Constant* con = value->as_Constant();
    if (con) {
      switch (con->type()->tag()) {
        case intTag:    return con->type()->as_IntConstant()->value() == 0;
        case longTag:   return con->type()->as_LongConstant()->value() == 0;
        case floatTag:  return jint_cast(con->type()->as_FloatConstant()->value()) == 0;
        case doubleTag: return jlong_cast(con->type()->as_DoubleConstant()->value()) == jlong_cast(0);
        case objectTag: return con->type() == objectNull;
        default:  ShouldNotReachHere();
      }
    }
    return false;
  }


  // return either the actual value of a load or the load itself
  Value load(LoadField* load) {
    if (!EliminateFieldAccess) {
      return load;
    }

    if (RoundFPResults && UseSSE < 2 && load->type()->is_float_kind()) {
      // can't skip load since value might get rounded as a side effect
      return load;
    }

    ciField* field = load->field();
    Value object   = load->obj();
    if (field->holder()->is_loaded() && !field->is_volatile()) {
      int offset = field->offset();
      Value result = NULL;
      int index = _newobjects.find(object);
      if (index != -1) {
        result = _fields.at(index)->at(field);
      } else if (_objects.at_grow(offset, NULL) == object) {
        result = _values.at(field);
      }
      if (result != NULL) {
#ifndef PRODUCT
        if (PrintIRDuringConstruction && Verbose) {
          tty->print_cr("Eliminated load: ");
          load->print_line();
        }
#endif
        assert(result->type()->tag() == load->type()->tag(), "wrong types");
        return result;
      }
    }
    return load;
  }

  // Record this newly allocated object
  void new_instance(NewInstance* object) {
    int index = _newobjects.length();
    _newobjects.append(object);
    if (_fields.at_grow(index, NULL) == NULL) {
      _fields.at_put(index, new FieldBuffer());
    } else {
      _fields.at(index)->kill();
    }
  }

  void store_value(Value value) {
    int index = _newobjects.find(value);
    if (index != -1) {
      // stored a newly allocated object into another object.
      // Assume we've lost track of it as a separate slice of memory.
      // We could do better by keeping track of whether individual
      // fields could alias each other.
      _newobjects.remove_at(index);
      // pull out the field info and store it at the end of the list
      // so it can be reused later.
      _fields.append(_fields.at(index));
      _fields.remove_at(index);
    }
  }

  void kill() {
    _newobjects.trunc_to(0);
    _objects.trunc_to(0);
    _values.kill();
  }
};


// Implementation of GraphBuilder's ScopeData

GraphBuilder::ScopeData::ScopeData(ScopeData* parent)
  : _parent(parent)
  , _bci2block(NULL)
  , _scope(NULL)
  , _has_handler(false)
  , _stream(NULL)
  , _work_list(NULL)
  , _parsing_jsr(false)
  , _jsr_xhandlers(NULL)
  , _caller_stack_size(-1)
  , _continuation(NULL)
  , _num_returns(0)
  , _cleanup_block(NULL)
  , _cleanup_return_prev(NULL)
  , _cleanup_state(NULL)
{
  if (parent != NULL) {
    _max_inline_size = (intx) ((float) NestedInliningSizeRatio * (float) parent->max_inline_size() / 100.0f);
  } else {
    _max_inline_size = MaxInlineSize;
  }
  if (_max_inline_size < MaxTrivialSize) {
    _max_inline_size = MaxTrivialSize;
  }
}
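// For illustration, assuming HotSpot's default flag values (MaxInlineSize=35,
// NestedInliningSizeRatio=90, MaxTrivialSize=6): the root scope allows 35
// bytecodes, the first nested scope 90% of that (31), the next 27, and so on,
// but never less than the trivial-method size of 6.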


void GraphBuilder::kill_all() {
  if (UseLocalValueNumbering) {
    vmap()->kill_all();
  }
  _memory->kill();
}


BlockBegin* GraphBuilder::ScopeData::block_at(int bci) {
  if (parsing_jsr()) {
    // It is necessary to clone all blocks associated with a
    // subroutine, including those for exception handlers in the scope
    // of the method containing the jsr (because those exception
    // handlers may contain ret instructions in some cases).
    BlockBegin* block = bci2block()->at(bci);
    if (block != NULL && block == parent()->bci2block()->at(bci)) {
      BlockBegin* new_block = new BlockBegin(block->bci());
#ifndef PRODUCT
      if (PrintInitialBlockList) {
        tty->print_cr("CFG: cloned block %d (bci %d) as block %d for jsr",
                      block->block_id(), block->bci(), new_block->block_id());
      }
#endif
      // copy data from the cloned block
      new_block->set_depth_first_number(block->depth_first_number());
      if (block->is_set(BlockBegin::parser_loop_header_flag)) new_block->set(BlockBegin::parser_loop_header_flag);
      // Preserve certain flags for assertion checking
      if (block->is_set(BlockBegin::subroutine_entry_flag)) new_block->set(BlockBegin::subroutine_entry_flag);
      if (block->is_set(BlockBegin::exception_entry_flag))  new_block->set(BlockBegin::exception_entry_flag);

      // Copy was_visited_flag to allow early detection of bailouts:
      // if a block that is used in a jsr has already been visited before,
      // it is shared between the normal control flow and a subroutine.
      // BlockBegin::try_merge returns false when the flag is set, which
      // leads to a compilation bailout.
      if (block->is_set(BlockBegin::was_visited_flag))  new_block->set(BlockBegin::was_visited_flag);

      bci2block()->at_put(bci, new_block);
      block = new_block;
    }
    return block;
  } else {
    return bci2block()->at(bci);
  }
}


XHandlers* GraphBuilder::ScopeData::xhandlers() const {
  if (_jsr_xhandlers == NULL) {
    assert(!parsing_jsr(), "");
    return scope()->xhandlers();
  }
  assert(parsing_jsr(), "");
  return _jsr_xhandlers;
}


void GraphBuilder::ScopeData::set_scope(IRScope* scope) {
  _scope = scope;
  bool parent_has_handler = false;
  if (parent() != NULL) {
    parent_has_handler = parent()->has_handler();
  }
  _has_handler = parent_has_handler || scope->xhandlers()->has_handlers();
}


void GraphBuilder::ScopeData::set_inline_cleanup_info(BlockBegin* block,
                                                      Instruction* return_prev,
                                                      ValueStack* return_state) {
  _cleanup_block       = block;
  _cleanup_return_prev = return_prev;
  _cleanup_state       = return_state;
}


void GraphBuilder::ScopeData::add_to_work_list(BlockBegin* block) {
  if (_work_list == NULL) {
    _work_list = new BlockList();
  }

  if (!block->is_set(BlockBegin::is_on_work_list_flag)) {
    // Do not start parsing the continuation block while in a
    // sub-scope
    if (parsing_jsr()) {
      if (block == jsr_continuation()) {
        return;
      }
    } else {
      if (block == continuation()) {
        return;
      }
    }
    block->set(BlockBegin::is_on_work_list_flag);
    _work_list->push(block);

    sort_top_into_worklist(_work_list, block);
  }
}


void GraphBuilder::sort_top_into_worklist(BlockList* worklist, BlockBegin* top) {
  assert(worklist->top() == top, "");
  // sort the newly pushed block into the work list, descending by depth-first number
  const int dfn = top->depth_first_number();
  assert(dfn != -1, "unknown depth first number");
  int i = worklist->length()-2;
  while (i >= 0) {
    BlockBegin* b = worklist->at(i);
    if (b->depth_first_number() < dfn) {
      worklist->at_put(i+1, b);
    } else {
      break;
    }
    i--;
  }
  if (i >= -1) worklist->at_put(i + 1, top);
}
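// For illustration: pushing a block with dfn 5 onto a work list holding
// [dfn 8, dfn 2] shifts the dfn-2 block up and yields [dfn 8, dfn 5, dfn 2],
// so subsequent pops see blocks in ascending depth-first (reverse postorder)
// order.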


BlockBegin* GraphBuilder::ScopeData::remove_from_work_list() {
  if (is_work_list_empty()) {
    return NULL;
  }
  return _work_list->pop();
}


bool GraphBuilder::ScopeData::is_work_list_empty() const {
  return (_work_list == NULL || _work_list->length() == 0);
}


void GraphBuilder::ScopeData::setup_jsr_xhandlers() {
  assert(parsing_jsr(), "");
  // clone all the exception handlers from the scope
  XHandlers* handlers = new XHandlers(scope()->xhandlers());
  const int n = handlers->length();
  for (int i = 0; i < n; i++) {
    // The XHandlers need to be adjusted to dispatch to the cloned
    // handler block instead of the default one but the synthetic
    // unlocker needs to be handled specially.  The synthetic unlocker
    // should be left alone since there can be only one and all code
    // should dispatch to the same one.
    XHandler* h = handlers->handler_at(i);
    assert(h->handler_bci() != SynchronizationEntryBCI, "must be real");
    h->set_entry_block(block_at(h->handler_bci()));
  }
  _jsr_xhandlers = handlers;
}


int GraphBuilder::ScopeData::num_returns() {
  if (parsing_jsr()) {
    return parent()->num_returns();
  }
  return _num_returns;
}


void GraphBuilder::ScopeData::incr_num_returns() {
  if (parsing_jsr()) {
    parent()->incr_num_returns();
  } else {
    ++_num_returns;
  }
}


// Implementation of GraphBuilder

#define INLINE_BAILOUT(msg)        { inline_bailout(msg); return false; }


void GraphBuilder::load_constant() {
  ciConstant con = stream()->get_constant();
  if (con.basic_type() == T_ILLEGAL) {
    BAILOUT("could not resolve a constant");
  } else {
    ValueType* t = illegalType;
    ValueStack* patch_state = NULL;
    switch (con.basic_type()) {
      case T_BOOLEAN: t = new IntConstant     (con.as_boolean()); break;
      case T_BYTE   : t = new IntConstant     (con.as_byte   ()); break;
      case T_CHAR   : t = new IntConstant     (con.as_char   ()); break;
      case T_SHORT  : t = new IntConstant     (con.as_short  ()); break;
      case T_INT    : t = new IntConstant     (con.as_int    ()); break;
      case T_LONG   : t = new LongConstant    (con.as_long   ()); break;
      case T_FLOAT  : t = new FloatConstant   (con.as_float  ()); break;
      case T_DOUBLE : t = new DoubleConstant  (con.as_double ()); break;
      case T_ARRAY  : t = new ArrayConstant   (con.as_object ()->as_array   ()); break;
      case T_OBJECT :
       {
        ciObject* obj = con.as_object();
        if (!obj->is_loaded()
            || (PatchALot && obj->klass() != ciEnv::current()->String_klass())) {
          patch_state = copy_state_before();
          t = new ObjectConstant(obj);
        } else {
          assert(!obj->is_klass(), "must be java_mirror of klass");
          t = new InstanceConstant(obj->as_instance());
        }
        break;
       }
      default       : ShouldNotReachHere();
    }
    Value x;
    if (patch_state != NULL) {
      x = new Constant(t, patch_state);
    } else {
      x = new Constant(t);
    }
    push(t, append(x));
  }
}


void GraphBuilder::load_local(ValueType* type, int index) {
  Value x = state()->local_at(index);
  assert(x != NULL && !x->type()->is_illegal(), "access of illegal local variable");
  push(type, x);
}


void GraphBuilder::store_local(ValueType* type, int index) {
  Value x = pop(type);
  store_local(state(), x, type, index);
}


void GraphBuilder::store_local(ValueStack* state, Value x, ValueType* type, int index) {
  if (parsing_jsr()) {
    // We need to do additional tracking of the location of the return
    // address for jsrs since we don't handle arbitrary jsr/ret
    // constructs. Here we are figuring out in which circumstances we
    // need to bail out.
    if (x->type()->is_address()) {
      scope_data()->set_jsr_return_address_local(index);

      // Also check parent jsrs (if any) at this time to see whether
      // they are using this local. We don't handle skipping over a
      // ret.
      for (ScopeData* cur_scope_data = scope_data()->parent();
           cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
           cur_scope_data = cur_scope_data->parent()) {
        if (cur_scope_data->jsr_return_address_local() == index) {
          BAILOUT("subroutine overwrites return address from previous subroutine");
        }
      }
    } else if (index == scope_data()->jsr_return_address_local()) {
      scope_data()->set_jsr_return_address_local(-1);
    }
  }

  state->store_local(index, round_fp(x));
}


void GraphBuilder::load_indexed(BasicType type) {
  ValueStack* state_before = copy_state_for_exception();
  Value index = ipop();
  Value array = apop();
  Value length = NULL;
  if (CSEArrayLength ||
      (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
      (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
    length = append(new ArrayLength(array, state_before));
  }
  push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
}


void GraphBuilder::store_indexed(BasicType type) {
  ValueStack* state_before = copy_state_for_exception();
  Value value = pop(as_ValueType(type));
  Value index = ipop();
  Value array = apop();
  Value length = NULL;
  if (CSEArrayLength ||
      (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
      (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
    length = append(new ArrayLength(array, state_before));
  }
  StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before);
  append(result);
  _memory->store_value(value);

  if (type == T_OBJECT && is_profiling()) {
    // Note that we'd collect profile data in this method if we wanted it.
    compilation()->set_would_profile(true);

    if (profile_checkcasts()) {
      result->set_profiled_method(method());
      result->set_profiled_bci(bci());
      result->set_should_profile(true);
    }
  }
}


void GraphBuilder::stack_op(Bytecodes::Code code) {
  switch (code) {
    case Bytecodes::_pop:
      { state()->raw_pop();
      }
      break;
    case Bytecodes::_pop2:
      { state()->raw_pop();
        state()->raw_pop();
      }
      break;
    case Bytecodes::_dup:
      { Value w = state()->raw_pop();
        state()->raw_push(w);
        state()->raw_push(w);
      }
      break;
    case Bytecodes::_dup_x1:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        state()->raw_push(w1);
        state()->raw_push(w2);
        state()->raw_push(w1);
      }
      break;
    case Bytecodes::_dup_x2:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        Value w3 = state()->raw_pop();
        state()->raw_push(w1);
        state()->raw_push(w3);
        state()->raw_push(w2);
        state()->raw_push(w1);
      }
      break;
    case Bytecodes::_dup2:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        state()->raw_push(w2);
        state()->raw_push(w1);
        state()->raw_push(w2);
        state()->raw_push(w1);
      }
      break;
    case Bytecodes::_dup2_x1:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        Value w3 = state()->raw_pop();
        state()->raw_push(w2);
        state()->raw_push(w1);
        state()->raw_push(w3);
        state()->raw_push(w2);
        state()->raw_push(w1);
      }
      break;
    case Bytecodes::_dup2_x2:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        Value w3 = state()->raw_pop();
        Value w4 = state()->raw_pop();
        state()->raw_push(w2);
        state()->raw_push(w1);
        state()->raw_push(w4);
        state()->raw_push(w3);
        state()->raw_push(w2);
        state()->raw_push(w1);
      }
      break;
    case Bytecodes::_swap:
      { Value w1 = state()->raw_pop();
        Value w2 = state()->raw_pop();
        state()->raw_push(w1);
        state()->raw_push(w2);
      }
      break;
    default:
      ShouldNotReachHere();
      break;
  }
}


void GraphBuilder::arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* state_before) {
  Value y = pop(type);
  Value x = pop(type);
  // NOTE: strictfp can be queried from the current method since we don't
  // inline methods with differing strictfp bits
  Value res = new ArithmeticOp(code, x, y, method()->is_strict(), state_before);
  // Note: currently single-precision floating-point rounding on Intel is handled at the LIRGenerator level
  res = append(res);
  if (method()->is_strict()) {
    res = round_fp(res);
  }
  push(type, res);
}


void GraphBuilder::negate_op(ValueType* type) {
  push(type, append(new NegateOp(pop(type))));
}


void GraphBuilder::shift_op(ValueType* type, Bytecodes::Code code) {
  Value s = ipop();
  Value x = pop(type);
  // try to simplify
  // Note: This code should go into the canonicalizer as soon as it
  //       can handle canonicalized forms that contain more than one node.
  if (CanonicalizeNodes && code == Bytecodes::_iushr) {
    // pattern: x >>> s
    IntConstant* s1 = s->type()->as_IntConstant();
    if (s1 != NULL) {
      // pattern: x >>> s1, with s1 constant
      ShiftOp* l = x->as_ShiftOp();
      if (l != NULL && l->op() == Bytecodes::_ishl) {
        // pattern: (a << b) >>> s1
        IntConstant* s0 = l->y()->type()->as_IntConstant();
        if (s0 != NULL) {
          // pattern: (a << s0) >>> s1
          const int s0c = s0->value() & 0x1F; // only the low 5 bits are significant for shifts
          const int s1c = s1->value() & 0x1F; // only the low 5 bits are significant for shifts
          if (s0c == s1c) {
            if (s0c == 0) {
              // pattern: (a << 0) >>> 0 => simplify to: a
              ipush(l->x());
            } else {
              // pattern: (a << s0c) >>> s0c => simplify to: a & m, with m constant
              assert(0 < s0c && s0c < BitsPerInt, "adjust code below to handle corner cases");
              const int m = (1 << (BitsPerInt - s0c)) - 1;
              Value s = append(new Constant(new IntConstant(m)));
              ipush(append(new LogicOp(Bytecodes::_iand, l->x(), s)));
            }
            return;
          }
        }
      }
    }
  }
  // could not simplify
  push(type, append(new ShiftOp(code, x, s)));
}
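// For illustration: with s0c == s1c == 24 the mask is m = (1 << 8) - 1 = 0xFF,
// so the Java idiom (a << 24) >>> 24 is rewritten to the cheaper a & 0xFF.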


void GraphBuilder::logic_op(ValueType* type, Bytecodes::Code code) {
  Value y = pop(type);
  Value x = pop(type);
  push(type, append(new LogicOp(code, x, y)));
}


void GraphBuilder::compare_op(ValueType* type, Bytecodes::Code code) {
  ValueStack* state_before = copy_state_before();
  Value y = pop(type);
  Value x = pop(type);
  ipush(append(new CompareOp(code, x, y, state_before)));
}


void GraphBuilder::convert(Bytecodes::Code op, BasicType from, BasicType to) {
  push(as_ValueType(to), append(new Convert(op, pop(as_ValueType(from)), as_ValueType(to))));
}


void GraphBuilder::increment() {
  int index = stream()->get_index();
  int delta = stream()->is_wide() ? (signed short)Bytes::get_Java_u2(stream()->cur_bcp() + 4) : (signed char)(stream()->cur_bcp()[2]);
  load_local(intType, index);
  ipush(append(new Constant(new IntConstant(delta))));
  arithmetic_op(intType, Bytecodes::_iadd);
  store_local(intType, index);
}


void GraphBuilder::_goto(int from_bci, int to_bci) {
  Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
  if (is_profiling()) {
    compilation()->set_would_profile(true);
  }
  if (profile_branches()) {
    x->set_profiled_method(method());
    x->set_profiled_bci(bci());
    x->set_should_profile(true);
  }
  append(x);
}


void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
  BlockBegin* tsux = block_at(stream()->get_dest());
  BlockBegin* fsux = block_at(stream()->next_bci());
  bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
  Instruction *i = append(new If(x, cond, false, y, tsux, fsux, is_bb ? state_before : NULL, is_bb));

  if (is_profiling()) {
    If* if_node = i->as_If();
    if (if_node != NULL) {
      // Note that we'd collect profile data in this method if we wanted it.
      compilation()->set_would_profile(true);
      // At level 2 we need the proper bci to count backedges
      if_node->set_profiled_bci(bci());
      if (profile_branches()) {
        // Successors can be rotated by the canonicalizer, check for this case.
        if_node->set_profiled_method(method());
        if_node->set_should_profile(true);
        if (if_node->tsux() == fsux) {
          if_node->set_swapped(true);
        }
      }
      return;
    }

    // Check if this If was reduced to Goto.
    Goto *goto_node = i->as_Goto();
    if (goto_node != NULL) {
      compilation()->set_would_profile(true);
      if (profile_branches()) {
        goto_node->set_profiled_method(method());
        goto_node->set_profiled_bci(bci());
        goto_node->set_should_profile(true);
        // Find out which successor is used.
        if (goto_node->default_sux() == tsux) {
          goto_node->set_direction(Goto::taken);
        } else if (goto_node->default_sux() == fsux) {
          goto_node->set_direction(Goto::not_taken);
        } else {
          ShouldNotReachHere();
        }
      }
      return;
    }
  }
}


void GraphBuilder::if_zero(ValueType* type, If::Condition cond) {
  Value y = append(new Constant(intZero));
  ValueStack* state_before = copy_state_before();
  Value x = ipop();
  if_node(x, cond, y, state_before);
}


void GraphBuilder::if_null(ValueType* type, If::Condition cond) {
  Value y = append(new Constant(objectNull));
  ValueStack* state_before = copy_state_before();
  Value x = apop();
  if_node(x, cond, y, state_before);
}


void GraphBuilder::if_same(ValueType* type, If::Condition cond) {
  ValueStack* state_before = copy_state_before();
  Value y = pop(type);
  Value x = pop(type);
  if_node(x, cond, y, state_before);
}


void GraphBuilder::jsr(int dest) {
  // We only handle well-formed jsrs (those which are "block-structured").
  // If the bytecodes are strange (jumping out of a jsr block) then we
  // might end up trying to re-parse a block containing a jsr which
  // has already been activated. Watch for this case and bail out.
  for (ScopeData* cur_scope_data = scope_data();
       cur_scope_data != NULL && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
       cur_scope_data = cur_scope_data->parent()) {
    if (cur_scope_data->jsr_entry_bci() == dest) {
      BAILOUT("too-complicated jsr/ret structure");
    }
  }

  push(addressType, append(new Constant(new AddressConstant(next_bci()))));
  if (!try_inline_jsr(dest)) {
    return; // bailed out while parsing and inlining subroutine
  }
}


void GraphBuilder::ret(int local_index) {
  if (!parsing_jsr()) BAILOUT("ret encountered while not parsing subroutine");

  if (local_index != scope_data()->jsr_return_address_local()) {
    BAILOUT("can not handle complicated jsr/ret constructs");
  }

  // Rets simply become (NON-SAFEPOINT) gotos to the jsr continuation
  append(new Goto(scope_data()->jsr_continuation(), false));
}


void GraphBuilder::table_switch() {
  Bytecode_tableswitch sw(stream());
  const int l = sw.length();
  if (CanonicalizeNodes && l == 1) {
    // total of 2 successors => use If instead of switch
    // Note: This code should go into the canonicalizer as soon as it
    //       can handle canonicalized forms that contain more than one node.
    Value key = append(new Constant(new IntConstant(sw.low_key())));
    BlockBegin* tsux = block_at(bci() + sw.dest_offset_at(0));
    BlockBegin* fsux = block_at(bci() + sw.default_offset());
    bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
    ValueStack* state_before = is_bb ? copy_state_before() : NULL;
    append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));
  } else {
    // collect successors
    BlockList* sux = new BlockList(l + 1, NULL);
    int i;
    bool has_bb = false;
    for (i = 0; i < l; i++) {
      sux->at_put(i, block_at(bci() + sw.dest_offset_at(i)));
      if (sw.dest_offset_at(i) < 0) has_bb = true;
    }
    // add default successor
    sux->at_put(i, block_at(bci() + sw.default_offset()));
    ValueStack* state_before = has_bb ? copy_state_before() : NULL;
    append(new TableSwitch(ipop(), sux, sw.low_key(), state_before, has_bb));
  }
}


void GraphBuilder::lookup_switch() {
  Bytecode_lookupswitch sw(stream());
  const int l = sw.number_of_pairs();
  if (CanonicalizeNodes && l == 1) {
    // total of 2 successors => use If instead of switch
    // Note: This code should go into the canonicalizer as soon as it
    //       can handle canonicalized forms that contain more than one node.
    // simplify to If
    LookupswitchPair pair = sw.pair_at(0);
    Value key = append(new Constant(new IntConstant(pair.match())));
    BlockBegin* tsux = block_at(bci() + pair.offset());
    BlockBegin* fsux = block_at(bci() + sw.default_offset());
    bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
    ValueStack* state_before = is_bb ? copy_state_before() : NULL;
    append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));
  } else {
    // collect successors & keys
    BlockList* sux = new BlockList(l + 1, NULL);
    intArray* keys = new intArray(l, 0);
    int i;
    bool has_bb = false;
    for (i = 0; i < l; i++) {
      LookupswitchPair pair = sw.pair_at(i);
      if (pair.offset() < 0) has_bb = true;
      sux->at_put(i, block_at(bci() + pair.offset()));
      keys->at_put(i, pair.match());
    }
    // add default successor
    sux->at_put(i, block_at(bci() + sw.default_offset()));
    ValueStack* state_before = has_bb ? copy_state_before() : NULL;
    append(new LookupSwitch(ipop(), sux, keys, state_before, has_bb));
  }
}


void GraphBuilder::call_register_finalizer() {
  // If the receiver requires finalization then emit code to perform
  // the registration on return.

  // Gather some type information about the receiver
  Value receiver = state()->local_at(0);
  assert(receiver != NULL, "must have a receiver");
  ciType* declared_type = receiver->declared_type();
  ciType* exact_type = receiver->exact_type();
  if (exact_type == NULL &&
      receiver->as_Local() &&
      receiver->as_Local()->java_index() == 0) {
    ciInstanceKlass* ik = compilation()->method()->holder();
    if (ik->is_final()) {
      exact_type = ik;
    } else if (UseCHA && !(ik->has_subklass() || ik->is_interface())) {
      // test class is leaf class
      compilation()->dependency_recorder()->assert_leaf_type(ik);
      exact_type = ik;
    } else {
      declared_type = ik;
    }
  }

  // see if we know statically that registration isn't required
  bool needs_check = true;
  if (exact_type != NULL) {
    needs_check = exact_type->as_instance_klass()->has_finalizer();
  } else if (declared_type != NULL) {
    ciInstanceKlass* ik = declared_type->as_instance_klass();
    if (!Dependencies::has_finalizable_subclass(ik)) {
      compilation()->dependency_recorder()->assert_has_no_finalizable_subclasses(ik);
      needs_check = false;
    }
  }

  if (needs_check) {
    // Perform the registration of finalizable objects.
    ValueStack* state_before = copy_state_for_exception();
    load_local(objectType, 0);
    append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
                               state()->pop_arguments(1),
                               true, state_before, true));
  }
}
  1390 void GraphBuilder::method_return(Value x) {
  1391   if (RegisterFinalizersAtInit &&
  1392       method()->intrinsic_id() == vmIntrinsics::_Object_init) {
  1393     call_register_finalizer();
  1396   // Check to see whether we are inlining. If so, Return
  1397   // instructions become Gotos to the continuation point.
  1398   if (continuation() != NULL) {
  1399     assert(!method()->is_synchronized() || InlineSynchronizedMethods, "can not inline synchronized methods yet");
  1401     if (compilation()->env()->dtrace_method_probes()) {
  1402       // Report exit from inline methods
  1403       Values* args = new Values(1);
  1404       args->push(append(new Constant(new ObjectConstant(method()))));
  1405       append(new RuntimeCall(voidType, "dtrace_method_exit", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), args));
  1408     // If the inlined method is synchronized, the monitor must be
  1409     // released before we jump to the continuation block.
  1410     if (method()->is_synchronized()) {
  1411       assert(state()->locks_size() == 1, "receiver must be locked here");
  1412       monitorexit(state()->lock_at(0), SynchronizationEntryBCI);
  1415     // State at end of inlined method is the state of the caller
  1416     // without the method parameters on stack, including the
  1417     // return value, if any, of the inlined method on operand stack.
  1418     set_state(state()->caller_state()->copy_for_parsing());
  1419     if (x != NULL) {
  1420       state()->push(x->type(), x);
  1422     Goto* goto_callee = new Goto(continuation(), false);
  1424     // See whether this is the first return; if so, store off some
  1425     // of the state for later examination
  1426     if (num_returns() == 0) {
  1427       set_inline_cleanup_info(_block, _last, state());
  1430     // The current bci() is in the wrong scope, so use the bci() of
  1431     // the continuation point.
  1432     append_with_bci(goto_callee, scope_data()->continuation()->bci());
  1433     incr_num_returns();
  1435     return;
  1436   }
  1437
  1438   state()->truncate_stack(0);
  1439   if (method()->is_synchronized()) {
  1440     // perform the unlocking before exiting the method
  1441     Value receiver;
  1442     if (!method()->is_static()) {
  1443       receiver = _initial_state->local_at(0);
  1444     } else {
  1445       receiver = append(new Constant(new ClassConstant(method()->holder())));
  1446     }
  1447     append_split(new MonitorExit(receiver, state()->unlock()));
  1448   }
  1449
  1450   append(new Return(x));
  1451 }
  1452
  1453
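       // Translates the field-access bytecodes (getstatic, putstatic,
       // getfield, putfield) into LoadField/StoreField instructions. When
       // the holder is not loaded or initialized the access is compiled with
       // a patching stub and the state before the instruction is kept for
       // deoptimization. Initialized static final fields are folded to
       // compile-time constants; e.g. (hypothetical Java source) a getstatic
       // of "static final int MAX = 10;" becomes an IntConstant(10).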
  1454 void GraphBuilder::access_field(Bytecodes::Code code) {
  1455   bool will_link;
  1456   ciField* field = stream()->get_field(will_link);
  1457   ciInstanceKlass* holder = field->holder();
  1458   BasicType field_type = field->type()->basic_type();
  1459   ValueType* type = as_ValueType(field_type);
  1460   // call will_link again to determine if the field is valid.
  1461   const bool needs_patching = !holder->is_loaded() ||
  1462                               !field->will_link(method()->holder(), code) ||
  1463                               PatchALot;
  1465   ValueStack* state_before = NULL;
  1466   if (!holder->is_initialized() || needs_patching) {
  1467     // save state before instruction for debug info when
  1468     // deoptimization happens during patching
  1469     state_before = copy_state_before();
  1470   }
  1471
  1472   Value obj = NULL;
  1473   if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
  1474     if (state_before != NULL) {
  1475       // build a patching constant
  1476       obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
  1477     } else {
  1478       obj = new Constant(new InstanceConstant(holder->java_mirror()));
  1479     }
  1480   }
  1481
  1482
  1483   const int offset = !needs_patching ? field->offset() : -1;
  1484   switch (code) {
  1485     case Bytecodes::_getstatic: {
  1486       // check for compile-time constants, i.e., initialized static final fields
  1487       Instruction* constant = NULL;
  1488       if (field->is_constant() && !PatchALot) {
  1489         ciConstant field_val = field->constant_value();
  1490         BasicType field_type = field_val.basic_type();
  1491         switch (field_type) {
  1492         case T_ARRAY:
  1493         case T_OBJECT:
  1494           if (field_val.as_object()->should_be_constant()) {
  1495             constant = new Constant(as_ValueType(field_val));
  1496           }
  1497           break;
  1499         default:
  1500           constant = new Constant(as_ValueType(field_val));
  1501         }
  1502       }
  1503       if (constant != NULL) {
  1504         push(type, append(constant));
  1505       } else {
  1506         if (state_before == NULL) {
  1507           state_before = copy_state_for_exception();
  1508         }
  1509         push(type, append(new LoadField(append(obj), offset, field, true,
  1510                                         state_before, needs_patching)));
  1511       }
  1512       break;
  1513     }
  1514     case Bytecodes::_putstatic:
  1515       { Value val = pop(type);
  1516         if (state_before == NULL) {
  1517           state_before = copy_state_for_exception();
  1518         }
  1519         append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
  1520       }
  1521       break;
  1522     case Bytecodes::_getfield :
  1523       {
  1524         if (state_before == NULL) {
  1525           state_before = copy_state_for_exception();
  1526         }
  1527         LoadField* load = new LoadField(apop(), offset, field, false, state_before, needs_patching);
  1528         Value replacement = !needs_patching ? _memory->load(load) : load;
  1529         if (replacement != load) {
  1530           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
  1531           push(type, replacement);
  1532         } else {
  1533           push(type, append(load));
  1534         }
  1535         break;
  1536       }
  1537
  1538     case Bytecodes::_putfield :
  1539       { Value val = pop(type);
  1540         if (state_before == NULL) {
  1541           state_before = copy_state_for_exception();
  1542         }
  1543         StoreField* store = new StoreField(apop(), offset, field, val, false, state_before, needs_patching);
  1544         if (!needs_patching) store = _memory->store(store);
  1545         if (store != NULL) {
  1546           append(store);
  1547         }
  1548       }
  1549       break;
  1550     default                   :
  1551       ShouldNotReachHere();
  1552       break;
  1553   }
  1554 }
  1555
  1556
  1557 Dependencies* GraphBuilder::dependency_recorder() const {
  1558   assert(DeoptC1, "need debug information");
  1559   return compilation()->dependency_recorder();
  1560 }
  1561
  1562
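       // Translates the invoke bytecodes, trying successively weaker
       // devirtualization strategies before falling back to a plain Invoke:
       // statically bindable virtual calls become invokespecial, an exactly
       // known receiver type binds the target directly, CHA can select a
       // monomorphic target (recorded as a dependency so the code deopts if
       // a conflicting class is loaded), and an interface with a single
       // implementor is bound behind an inserted CheckCast. Inlining is
       // attempted for all statically bound calls.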
  1563 void GraphBuilder::invoke(Bytecodes::Code code) {
  1564   bool will_link;
  1565   ciMethod* target = stream()->get_method(will_link);
  1566   // we have to make sure the argument size (incl. the receiver)
  1567   // is correct for compilation (the call would fail later during
  1568   // linkage anyway) - was bug (gri 7/28/99)
  1569   if (target->is_loaded() && target->is_static() != (code == Bytecodes::_invokestatic)) BAILOUT("will cause link error");
  1570   ciInstanceKlass* klass = target->holder();
  1572   // check if CHA possible: if so, change the code to invoke_special
  1573   ciInstanceKlass* calling_klass = method()->holder();
  1574   ciKlass* holder = stream()->get_declared_method_holder();
  1575   ciInstanceKlass* callee_holder = ciEnv::get_instance_klass_for_declared_method_holder(holder);
  1576   ciInstanceKlass* actual_recv = callee_holder;
  1578   // some methods are obviously bindable without any type checks so
  1579   // convert them directly to an invokespecial.
  1580   if (target->is_loaded() && !target->is_abstract() &&
  1581       target->can_be_statically_bound() && code == Bytecodes::_invokevirtual) {
  1582     code = Bytecodes::_invokespecial;
  1583   }
  1584
  1585   // NEEDS_CLEANUP
  1586   // I've added the target->is_loaded() test below but I don't really understand
  1587   // how klass->is_loaded() can be true and yet target->is_loaded() is false.
  1588   // This happened while running the JCK invokevirtual tests under doit.  TKR
  1589   ciMethod* cha_monomorphic_target = NULL;
  1590   ciMethod* exact_target = NULL;
  1591   if (UseCHA && DeoptC1 && klass->is_loaded() && target->is_loaded() &&
  1592       !target->is_method_handle_invoke()) {
  1593     Value receiver = NULL;
  1594     ciInstanceKlass* receiver_klass = NULL;
  1595     bool type_is_exact = false;
  1596     // try to find a precise receiver type
  1597     if (will_link && !target->is_static()) {
  1598       int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
  1599       receiver = state()->stack_at(index);
  1600       ciType* type = receiver->exact_type();
  1601       if (type != NULL && type->is_loaded() &&
  1602           type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
  1603         receiver_klass = (ciInstanceKlass*) type;
  1604         type_is_exact = true;
  1605       }
  1606       if (type == NULL) {
  1607         type = receiver->declared_type();
  1608         if (type != NULL && type->is_loaded() &&
  1609             type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
  1610           receiver_klass = (ciInstanceKlass*) type;
  1611           if (receiver_klass->is_leaf_type() && !receiver_klass->is_final()) {
  1612             // Insert a dependency on this type since
  1613             // find_monomorphic_target may assume it's already done.
  1614             dependency_recorder()->assert_leaf_type(receiver_klass);
  1615             type_is_exact = true;
  1616           }
  1617         }
  1618       }
  1619     }
  1620     if (receiver_klass != NULL && type_is_exact &&
  1621         receiver_klass->is_loaded() && code != Bytecodes::_invokespecial) {
  1622       // If we have the exact receiver type we can bind directly to
  1623       // the method to call.
  1624       exact_target = target->resolve_invoke(calling_klass, receiver_klass);
  1625       if (exact_target != NULL) {
  1626         target = exact_target;
  1627         code = Bytecodes::_invokespecial;
  1628       }
  1629     }
  1630     if (receiver_klass != NULL &&
  1631         receiver_klass->is_subtype_of(actual_recv) &&
  1632         actual_recv->is_initialized()) {
  1633       actual_recv = receiver_klass;
  1634     }
  1635
  1636     if ((code == Bytecodes::_invokevirtual && callee_holder->is_initialized()) ||
  1637         (code == Bytecodes::_invokeinterface && callee_holder->is_initialized() && !actual_recv->is_interface())) {
  1638       // Use CHA on the receiver to select a more precise method.
  1639       cha_monomorphic_target = target->find_monomorphic_target(calling_klass, callee_holder, actual_recv);
  1640     } else if (code == Bytecodes::_invokeinterface && callee_holder->is_loaded() && receiver != NULL) {
  1641       // if there is only one implementor of this interface then we
  1642       // may be able to bind this invoke directly to the implementing
  1643       // klass, but we need both a dependence on the single interface
  1644       // and on the method we bind to.  Additionally, since all we know
  1645       // about the receiver type is that it's supposed to implement the
  1646       // interface, we have to insert a check that it's the class we
  1647       // expect.  Interface types are not checked by the verifier so
  1648       // they are roughly equivalent to Object.
  1649       ciInstanceKlass* singleton = NULL;
  1650       if (target->holder()->nof_implementors() == 1) {
  1651         singleton = target->holder()->implementor(0);
  1652       }
  1653       if (singleton) {
  1654         cha_monomorphic_target = target->find_monomorphic_target(calling_klass, target->holder(), singleton);
  1655         if (cha_monomorphic_target != NULL) {
  1656           // If CHA is able to bind this invoke then update the class
  1657           // to match that class, otherwise klass will refer to the
  1658           // interface.
  1659           klass = cha_monomorphic_target->holder();
  1660           actual_recv = target->holder();
  1662           // insert a check that it's really the expected class.
  1663           CheckCast* c = new CheckCast(klass, receiver, copy_state_for_exception());
  1664           c->set_incompatible_class_change_check();
  1665           c->set_direct_compare(klass->is_final());
  1666           append_split(c);
  1667         }
  1668       }
  1669     }
  1670   }
  1671
  1672   if (cha_monomorphic_target != NULL) {
  1673     if (cha_monomorphic_target->is_abstract()) {
  1674       // Do not optimize for abstract methods
  1675       cha_monomorphic_target = NULL;
  1676     }
  1677   }
  1678
  1679   if (cha_monomorphic_target != NULL) {
  1680     if (!(target->is_final_method())) {
  1681       // If we inlined because CHA revealed only a single target method,
  1682       // then we are dependent on that target method not getting overridden
  1683       // by dynamic class loading.  Be sure to test the "static" receiver
  1684       // dest_method here, as opposed to the actual receiver, which may
  1685       // falsely lead us to believe that the receiver is final or private.
  1686       dependency_recorder()->assert_unique_concrete_method(actual_recv, cha_monomorphic_target);
  1687     }
  1688     code = Bytecodes::_invokespecial;
  1689   }
  1690   // check if we could do inlining
  1691   if (!PatchALot && Inline && klass->is_loaded() &&
  1692       (klass->is_initialized() || klass->is_interface() && target->holder()->is_initialized())
  1693       && target->will_link(klass, callee_holder, code)) {
  1694     // callee is known => check if we have static binding
  1695     assert(target->is_loaded(), "callee must be known");
  1696     if (code == Bytecodes::_invokestatic
  1697      || code == Bytecodes::_invokespecial
  1698      || code == Bytecodes::_invokevirtual && target->is_final_method()
  1699     ) {
  1700       // static binding => check if callee is ok
  1701       ciMethod* inline_target = (cha_monomorphic_target != NULL)
  1702                                   ? cha_monomorphic_target
  1703                                   : target;
  1704       bool res = try_inline(inline_target, (cha_monomorphic_target != NULL) || (exact_target != NULL));
  1705       CHECK_BAILOUT();
  1707 #ifndef PRODUCT
  1708       // printing
  1709       if (PrintInlining && !res) {
  1710         // if it was successfully inlined, then it was already printed.
  1711         print_inline_result(inline_target, res);
  1712       }
  1713 #endif
  1714       clear_inline_bailout();
  1715       if (res) {
  1716         // Register dependence if JVMTI has either breakpoint
  1717         // setting or hotswapping of methods capabilities since they may
  1718         // cause deoptimization.
  1719         if (compilation()->env()->jvmti_can_hotswap_or_post_breakpoint()) {
  1720           dependency_recorder()->assert_evol_method(inline_target);
  1721         }
  1722         return;
  1723       }
  1724     }
  1725   }
  1726   // If we attempted an inline which did not succeed because of a
  1727   // bailout during construction of the callee graph, the entire
  1728   // compilation has to be aborted. This is fairly rare and currently
  1729   // seems to only occur for jasm-generated classes which contain
  1730   // jsr/ret pairs which are not associated with finally clauses and
  1731   // do not have exception handlers in the containing method, and are
  1732   // therefore not caught early enough to abort the inlining without
  1733   // corrupting the graph. (We currently bail out with a non-empty
  1734   // stack at a ret in these situations.)
  1735   CHECK_BAILOUT();
  1737   // inlining not successful => standard invoke
  1738   bool is_loaded = target->is_loaded();
  1739   bool has_receiver =
  1740     code == Bytecodes::_invokespecial   ||
  1741     code == Bytecodes::_invokevirtual   ||
  1742     code == Bytecodes::_invokeinterface;
  1743   bool is_invokedynamic = code == Bytecodes::_invokedynamic;
  1744   ValueType* result_type = as_ValueType(target->return_type());
  1746   // We require the debug info to be the "state before" because
  1747   // invokedynamics may deoptimize.
  1748   ValueStack* state_before = is_invokedynamic ? copy_state_before() : copy_state_exhandling();
  1750   Values* args = state()->pop_arguments(target->arg_size_no_receiver());
  1751   Value recv = has_receiver ? apop() : NULL;
  1752   int vtable_index = methodOopDesc::invalid_vtable_index;
  1754 #ifdef SPARC
  1755   // Currently only supported on Sparc.
  1756   // The UseInlineCaches flag only controls dispatch to invokevirtuals for
  1757   // loaded classes which we weren't able to statically bind.
  1758   if (!UseInlineCaches && is_loaded && code == Bytecodes::_invokevirtual
  1759       && !target->can_be_statically_bound()) {
  1760     // Find a vtable index if one is available
  1761     vtable_index = target->resolve_vtable_index(calling_klass, callee_holder);
  1762   }
  1763 #endif
  1765   if (recv != NULL &&
  1766       (code == Bytecodes::_invokespecial ||
  1767        !is_loaded || target->is_final())) {
  1768     // invokespecial always needs a NULL check.  invokevirtual where
  1769     // the target is final, or where it's not known whether the
  1770     // target is final, requires a NULL check.  Otherwise a normal
  1771     // invokevirtual will perform the null check during the lookup
  1772     // logic or the unverified entry point.  Profiling of calls
  1773     // requires that the null check is performed in all cases.
  1774     null_check(recv);
  1775   }
  1776
  1777   if (is_profiling()) {
  1778     if (recv != NULL && profile_calls()) {
  1779       null_check(recv);
  1780     }
  1781     // Note that we'd collect profile data in this method if we wanted it.
  1782     compilation()->set_would_profile(true);
  1784     if (profile_calls()) {
  1785       assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
  1786       ciKlass* target_klass = NULL;
  1787       if (cha_monomorphic_target != NULL) {
  1788         target_klass = cha_monomorphic_target->holder();
  1789       } else if (exact_target != NULL) {
  1790         target_klass = exact_target->holder();
  1791       }
  1792       profile_call(recv, target_klass);
  1793     }
  1794   }
  1795
  1796   Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target, state_before);
  1797   // push result
  1798   append_split(result);
  1800   if (result_type != voidType) {
  1801     if (method()->is_strict()) {
  1802       push(result_type, round_fp(result));
  1803     } else {
  1804       push(result_type, result);
  1805     }
  1806   }
  1807 }
  1808
  1809
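       // Allocation bytecodes. Each allocation is a StateSplit: the state
       // captured before it is used for debug info if the slow path has to
       // deoptimize (e.g. for patching an unloaded klass) or throws, say,
       // an OutOfMemoryError.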
  1810 void GraphBuilder::new_instance(int klass_index) {
  1811   ValueStack* state_before = copy_state_exhandling();
  1812   bool will_link;
  1813   ciKlass* klass = stream()->get_klass(will_link);
  1814   assert(klass->is_instance_klass(), "must be an instance klass");
  1815   NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before);
  1816   _memory->new_instance(new_instance);
  1817   apush(append_split(new_instance));
  1818 }
  1819
  1820
  1821 void GraphBuilder::new_type_array() {
  1822   ValueStack* state_before = copy_state_exhandling();
  1823   apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
  1824 }
  1825
  1826
  1827 void GraphBuilder::new_object_array() {
  1828   bool will_link;
  1829   ciKlass* klass = stream()->get_klass(will_link);
  1830   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
  1831   NewArray* n = new NewObjectArray(klass, ipop(), state_before);
  1832   apush(append_split(n));
  1833 }
  1834
  1835
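       // Returns true if a type check against k can be implemented as a
       // single klass-pointer comparison: either k is final, or (under CHA)
       // k currently has no subclass and a leaf-type dependency is recorded
       // so the code is deoptimized if a subclass is loaded later.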
  1836 bool GraphBuilder::direct_compare(ciKlass* k) {
  1837   if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
  1838     ciInstanceKlass* ik = k->as_instance_klass();
  1839     if (ik->is_final()) {
  1840       return true;
  1841     } else {
  1842       if (DeoptC1 && UseCHA && !(ik->has_subklass() || ik->is_interface())) {
  1843         // test class is leaf class
  1844         dependency_recorder()->assert_leaf_type(ik);
  1845         return true;
  1846       }
  1847     }
  1848   }
  1849   return false;
  1850 }
  1851
  1852
  1853 void GraphBuilder::check_cast(int klass_index) {
  1854   bool will_link;
  1855   ciKlass* klass = stream()->get_klass(will_link);
  1856   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
  1857   CheckCast* c = new CheckCast(klass, apop(), state_before);
  1858   apush(append_split(c));
  1859   c->set_direct_compare(direct_compare(klass));
  1861   if (is_profiling()) {
  1862     // Note that we'd collect profile data in this method if we wanted it.
  1863     compilation()->set_would_profile(true);
  1865     if (profile_checkcasts()) {
  1866       c->set_profiled_method(method());
  1867       c->set_profiled_bci(bci());
  1868       c->set_should_profile(true);
  1869     }
  1870   }
  1871 }
  1872
  1873
  1874 void GraphBuilder::instance_of(int klass_index) {
  1875   bool will_link;
  1876   ciKlass* klass = stream()->get_klass(will_link);
  1877   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
  1878   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
  1879   ipush(append_split(i));
  1880   i->set_direct_compare(direct_compare(klass));
  1882   if (is_profiling()) {
  1883     // Note that we'd collect profile data in this method if we wanted it.
  1884     compilation()->set_would_profile(true);
  1886     if (profile_checkcasts()) {
  1887       i->set_profiled_method(method());
  1888       i->set_profiled_bci(bci());
  1889       i->set_should_profile(true);
  1890     }
  1891   }
  1892 }
  1893
  1894
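       // Monitor bytecodes. Both operations conservatively kill the local
       // value map: field values cached across a lock operation may have
       // been modified by another thread.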
  1895 void GraphBuilder::monitorenter(Value x, int bci) {
  1896   // save state before locking in case of deoptimization after a NullPointerException
  1897   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
  1898   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
  1899   kill_all();
  1900 }
  1901
  1902
  1903 void GraphBuilder::monitorexit(Value x, int bci) {
  1904   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
  1905   kill_all();
  1906 }
  1907
  1908
  1909 void GraphBuilder::new_multi_array(int dimensions) {
  1910   bool will_link;
  1911   ciKlass* klass = stream()->get_klass(will_link);
  1912   ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
  1914   Values* dims = new Values(dimensions, NULL);
  1915   // fill in all dimensions
  1916   int i = dimensions;
  1917   while (i-- > 0) dims->at_put(i, ipop());
  1918   // create array
  1919   NewArray* n = new NewMultiArray(klass, dims, state_before);
  1920   apush(append_split(n));
  1921 }
  1922
  1923
  1924 void GraphBuilder::throw_op(int bci) {
  1925   // We require that the debug info for a Throw be the "state before"
  1926   // the Throw (i.e., exception oop is still on TOS)
  1927   ValueStack* state_before = copy_state_before_with_bci(bci);
  1928   Throw* t = new Throw(apop(), state_before);
  1929   // operand stack not needed after a throw
  1930   state()->truncate_stack(0);
  1931   append_with_bci(t, bci);
  1932 }
  1933
  1934
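       // Rounds an FP result to its declared precision for strict-mode
       // methods. Without SSE2, the x87 FPU keeps intermediate results in
       // 80-bit registers, so an explicit RoundFP (store/reload) is needed;
       // loads, constants and parameters are already correctly rounded.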
  1935 Value GraphBuilder::round_fp(Value fp_value) {
  1936   // no rounding needed if SSE2 is used
  1937   if (RoundFPResults && UseSSE < 2) {
  1938     // Must currently insert rounding node for doubleword values that
  1939     // are results of expressions (i.e., not loads from memory or
  1940     // constants)
  1941     if (fp_value->type()->tag() == doubleTag &&
  1942         fp_value->as_Constant() == NULL &&
  1943         fp_value->as_Local() == NULL &&       // method parameters need no rounding
  1944         fp_value->as_RoundFP() == NULL) {
  1945       return append(new RoundFP(fp_value));
  1946     }
  1947   }
  1948   return fp_value;
  1949 }
  1950
  1951
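       // Central append routine: canonicalizes the instruction, applies
       // local value numbering, links the result into the current block,
       // enforces the instruction-count bailout, kills the memory buffer
       // across calls and state-changing intrinsics, and attaches exception
       // handlers to instructions that can trap.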
  1952 Instruction* GraphBuilder::append_with_bci(Instruction* instr, int bci) {
  1953   Canonicalizer canon(compilation(), instr, bci);
  1954   Instruction* i1 = canon.canonical();
  1955   if (i1->is_linked() || !i1->can_be_linked()) {
  1956     // Canonicalizer returned an instruction which was already
  1957     // appended so simply return it.
  1958     return i1;
  1959   }
  1960
  1961   if (UseLocalValueNumbering) {
  1962     // Lookup the instruction in the ValueMap and add it to the map if
  1963     // it's not found.
  1964     Instruction* i2 = vmap()->find_insert(i1);
  1965     if (i2 != i1) {
  1966       // found an entry in the value map, so just return it.
  1967       assert(i2->is_linked(), "should already be linked");
  1968       return i2;
  1969     }
  1970     ValueNumberingEffects vne(vmap());
  1971     i1->visit(&vne);
  1972   }
  1973
  1974   // i1 was not eliminated => append it
  1975   assert(i1->next() == NULL, "shouldn't already be linked");
  1976   _last = _last->set_next(i1, canon.bci());
  1978   if (++_instruction_count >= InstructionCountCutoff && !bailed_out()) {
  1979     // set the bailout state but complete normal processing.  We
  1980     // might do a little more work before noticing the bailout so we
  1981     // want processing to continue normally until it's noticed.
  1982     bailout("Method and/or inlining is too large");
  1983   }
  1984
  1985 #ifndef PRODUCT
  1986   if (PrintIRDuringConstruction) {
  1987     InstructionPrinter ip;
  1988     ip.print_line(i1);
  1989     if (Verbose) {
  1990       state()->print();
  1991     }
  1992   }
  1993 #endif
  1995   // save state after modification of operand stack for StateSplit instructions
  1996   StateSplit* s = i1->as_StateSplit();
  1997   if (s != NULL) {
  1998     if (EliminateFieldAccess) {
  1999       Intrinsic* intrinsic = s->as_Intrinsic();
  2000       if (s->as_Invoke() != NULL || (intrinsic && !intrinsic->preserves_state())) {
  2001         _memory->kill();
  2002       }
  2003     }
  2004     s->set_state(state()->copy(ValueStack::StateAfter, canon.bci()));
  2005   }
  2006
  2007   // set up exception handlers for this instruction if necessary
  2008   if (i1->can_trap()) {
  2009     i1->set_exception_handlers(handle_exception(i1));
  2010     assert(i1->exception_state() != NULL || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
  2011   }
  2012   return i1;
  2013 }
  2014
  2015
  2016 Instruction* GraphBuilder::append(Instruction* instr) {
  2017   assert(instr->as_StateSplit() == NULL || instr->as_BlockEnd() != NULL, "wrong append used");
  2018   return append_with_bci(instr, bci());
  2019 }
  2020
  2021
  2022 Instruction* GraphBuilder::append_split(StateSplit* instr) {
  2023   return append_with_bci(instr, bci());
  2024 }
  2025
  2026
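       // Appends an explicit NullCheck unless the value is statically known
       // to be non-null: freshly allocated objects/arrays and loaded
       // non-null constants can never trap.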
  2027 void GraphBuilder::null_check(Value value) {
  2028   if (value->as_NewArray() != NULL || value->as_NewInstance() != NULL) {
  2029     return;
  2030   } else {
  2031     Constant* con = value->as_Constant();
  2032     if (con) {
  2033       ObjectType* c = con->type()->as_ObjectType();
  2034       if (c && c->is_loaded()) {
  2035         ObjectConstant* oc = c->as_ObjectConstant();
  2036         if (!oc || !oc->value()->is_null_object()) {
  2037           return;
  2038         }
  2039       }
  2040     }
  2041   }
  2042   append(new NullCheck(value, copy_state_for_exception()));
  2043 }
  2044
  2045
  2046
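       // Computes the exception handlers covering this instruction, walking
       // from the current scope outward through the caller scopes. Every
       // covering handler is joined with the current state (adding phi
       // operands), cloned (phi_operand and scope_count are per use site)
       // and queued for lazy parsing; the walk stops at a catch-all handler.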
  2047 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
  2048   if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != NULL)) {
  2049     assert(instruction->exception_state() == NULL
  2050            || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
  2051            || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->jvmti_can_access_local_variables()),
  2052            "exception_state should be of exception kind");
  2053     return new XHandlers();
  2054   }
  2055
  2056   XHandlers*  exception_handlers = new XHandlers();
  2057   ScopeData*  cur_scope_data = scope_data();
  2058   ValueStack* cur_state = instruction->state_before();
  2059   ValueStack* prev_state = NULL;
  2060   int scope_count = 0;
  2062   assert(cur_state != NULL, "state_before must be set");
  2063   do {
  2064     int cur_bci = cur_state->bci();
  2065     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
  2066     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
  2068     // join with all potential exception handlers
  2069     XHandlers* list = cur_scope_data->xhandlers();
  2070     const int n = list->length();
  2071     for (int i = 0; i < n; i++) {
  2072       XHandler* h = list->handler_at(i);
  2073       if (h->covers(cur_bci)) {
  2074         // h is a potential exception handler => join it
  2075         compilation()->set_has_exception_handlers(true);
  2077         BlockBegin* entry = h->entry_block();
  2078         if (entry == block()) {
  2079           // It's acceptable for an exception handler to cover itself
  2080           // but we don't handle that in the parser currently.  It's
  2081           // very rare so we bail out instead of trying to handle it.
  2082           BAILOUT_("exception handler covers itself", exception_handlers);
  2083         }
  2084         assert(entry->bci() == h->handler_bci(), "must match");
  2085         assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
  2087         // previously this was a BAILOUT, but this is not necessary
  2088         // now because asynchronous exceptions are not handled this way.
  2089         assert(entry->state() == NULL || cur_state->total_locks_size() == entry->state()->total_locks_size(), "locks do not match");
  2091         // xhandlers start with an empty expression stack
  2092         if (cur_state->stack_size() != 0) {
  2093           cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
  2094         }
  2095         if (instruction->exception_state() == NULL) {
  2096           instruction->set_exception_state(cur_state);
  2097         }
  2098
  2099         // Note: Usually this join must work. However, very
  2100         // complicated jsr-ret structures where we don't ret from
  2101         // the subroutine can cause the objects on the monitor
  2102         // stacks to not match because blocks can be parsed twice.
  2103         // The only test case we've seen so far which exhibits this
  2104         // problem is caught by the infinite recursion test in
  2105         // GraphBuilder::jsr() if the join doesn't work.
  2106         if (!entry->try_merge(cur_state)) {
  2107           BAILOUT_("error while joining with exception handler, prob. due to complicated jsr/rets", exception_handlers);
  2108         }
  2109
  2110         // add current state for correct handling of phi functions at begin of xhandler
  2111         int phi_operand = entry->add_exception_state(cur_state);
  2113         // add entry to the list of xhandlers of this block
  2114         _block->add_exception_handler(entry);
  2116         // add back-edge from xhandler entry to this block
  2117         if (!entry->is_predecessor(_block)) {
  2118           entry->add_predecessor(_block);
  2119         }
  2120
  2121         // clone XHandler because phi_operand and scope_count can not be shared
  2122         XHandler* new_xhandler = new XHandler(h);
  2123         new_xhandler->set_phi_operand(phi_operand);
  2124         new_xhandler->set_scope_count(scope_count);
  2125         exception_handlers->append(new_xhandler);
  2127         // fill in exception handler subgraph lazily
  2128         assert(!entry->is_set(BlockBegin::was_visited_flag), "entry must not be visited yet");
  2129         cur_scope_data->add_to_work_list(entry);
  2131         // stop when reaching catchall
  2132         if (h->catch_type() == 0) {
  2133           return exception_handlers;
  2134         }
  2135       }
  2136     }
  2137
  2138     if (exception_handlers->length() == 0) {
  2139       // This scope and all callees do not handle exceptions, so the local
  2140       // variables of this scope are not needed. However, the scope itself is
  2141       // required for a correct exception stack trace -> clear out the locals.
  2142       if (_compilation->env()->jvmti_can_access_local_variables()) {
  2143         cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
  2144       } else {
  2145         cur_state = cur_state->copy(ValueStack::EmptyExceptionState, cur_state->bci());
  2146       }
  2147       if (prev_state != NULL) {
  2148         prev_state->set_caller_state(cur_state);
  2149       }
  2150       if (instruction->exception_state() == NULL) {
  2151         instruction->set_exception_state(cur_state);
  2152       }
  2153     }
  2154
  2155     // Set up iteration for next time.
  2156     // If parsing a jsr, do not grab exception handlers from the
  2157     // parent scopes for this method (already got them, and they
  2158     // needed to be cloned)
  2160     while (cur_scope_data->parsing_jsr()) {
  2161       cur_scope_data = cur_scope_data->parent();
  2162     }
  2163
  2164     assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
  2165     assert(cur_state->locks_size() == 0 || cur_state->locks_size() == 1, "unlocking must be done in a catchall exception handler");
  2167     prev_state = cur_state;
  2168     cur_state = cur_state->caller_state();
  2169     cur_scope_data = cur_scope_data->parent();
  2170     scope_count++;
  2171   } while (cur_scope_data != NULL);
  2173   return exception_handlers;
  2174 }
  2175
  2176
  2177 // Helper class for simplifying Phis.
  2178 class PhiSimplifier : public BlockClosure {
  2179  private:
  2180   bool _has_substitutions;
  2181   Value simplify(Value v);
  2183  public:
  2184   PhiSimplifier(BlockBegin* start) : _has_substitutions(false) {
  2185     start->iterate_preorder(this);
  2186     if (_has_substitutions) {
  2187       SubstitutionResolver sr(start);
  2188     }
  2189   }
  2190   void block_do(BlockBegin* b);
  2191   bool has_substitutions() const { return _has_substitutions; }
  2192 };
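       // Recursively simplifies a phi function: x = [y, x] and x = [y, y]
       // both reduce to y. The visited flag breaks cycles (a phi can reach
       // itself through other phis) and cannot_simplify caches negative
       // results so each phi is processed at most once.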
  2195 Value PhiSimplifier::simplify(Value v) {
  2196   Phi* phi = v->as_Phi();
  2198   if (phi == NULL) {
  2199     // no phi function
  2200     return v;
  2201   } else if (v->has_subst()) {
  2202     // already substituted; subst can be phi itself -> simplify
  2203     return simplify(v->subst());
  2204   } else if (phi->is_set(Phi::cannot_simplify)) {
  2205     // already tried to simplify phi before
  2206     return phi;
  2207   } else if (phi->is_set(Phi::visited)) {
  2208     // break cycles in phi functions
  2209     return phi;
  2210   } else if (phi->type()->is_illegal()) {
  2211     // illegal phi functions are ignored anyway
  2212     return phi;
  2214   } else {
  2215     // mark phi function as processed to break cycles in phi functions
  2216     phi->set(Phi::visited);
  2218     // simplify x = [y, x] and x = [y, y] to y
  2219     Value subst = NULL;
  2220     int opd_count = phi->operand_count();
  2221     for (int i = 0; i < opd_count; i++) {
  2222       Value opd = phi->operand_at(i);
  2223       assert(opd != NULL, "Operand must exist!");
  2225       if (opd->type()->is_illegal()) {
  2226         // if one operand is illegal, the entire phi function is illegal
  2227         phi->make_illegal();
  2228         phi->clear(Phi::visited);
  2229         return phi;
  2230       }
  2231
  2232       Value new_opd = simplify(opd);
  2233       assert(new_opd != NULL, "Simplified operand must exist!");
  2235       if (new_opd != phi && new_opd != subst) {
  2236         if (subst == NULL) {
  2237           subst = new_opd;
  2238         } else {
  2239           // no simplification possible
  2240           phi->set(Phi::cannot_simplify);
  2241           phi->clear(Phi::visited);
  2242           return phi;
  2243         }
  2244       }
  2245     }
  2246
  2247     // successfully simplified phi function
  2248     assert(subst != NULL, "illegal phi function");
  2249     _has_substitutions = true;
  2250     phi->clear(Phi::visited);
  2251     phi->set_subst(subst);
  2253 #ifndef PRODUCT
  2254     if (PrintPhiFunctions) {
  2255       tty->print_cr("simplified phi function %c%d to %c%d (Block B%d)", phi->type()->tchar(), phi->id(), subst->type()->tchar(), subst->id(), phi->block()->block_id());
  2256     }
  2257 #endif
  2258
  2259     return subst;
  2260   }
  2261 }
  2262
  2263
  2264 void PhiSimplifier::block_do(BlockBegin* b) {
  2265   for_each_phi_fun(b, phi,
  2266     simplify(phi);
  2267   );
  2269 #ifdef ASSERT
  2270   for_each_phi_fun(b, phi,
  2271                    assert(phi->operand_count() != 1 || phi->subst() != phi, "missed trivial simplification");
  2272   );
  2274   ValueStack* state = b->state()->caller_state();
  2275   for_each_state_value(state, value,
  2276     Phi* phi = value->as_Phi();
  2277     assert(phi == NULL || phi->block() != b, "must not have phi function to simplify in caller state");
  2278   );
  2279 #endif
  2280 }
  2281
  2282 // This method is called after all blocks are filled with HIR instructions
  2283 // It eliminates all Phi functions of the form x = [y, y] and x = [y, x]
  2284 void GraphBuilder::eliminate_redundant_phis(BlockBegin* start) {
  2285   PhiSimplifier simplifier(start);
  2286 }
  2287
  2288
  2289 void GraphBuilder::connect_to_end(BlockBegin* beg) {
  2290   // setup iteration
  2291   kill_all();
  2292   _block = beg;
  2293   _state = beg->state()->copy_for_parsing();
  2294   _last  = beg;
  2295   iterate_bytecodes_for_block(beg->bci());
  2296 }
  2297
  2298
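       // Parses the bytecodes of the current block until a BlockEnd is
       // appended, the method ends, or the next bci starts another basic
       // block; in the fall-through case a Goto to the successor block is
       // appended so that every block ends in a BlockEnd.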
  2299 BlockEnd* GraphBuilder::iterate_bytecodes_for_block(int bci) {
  2300 #ifndef PRODUCT
  2301   if (PrintIRDuringConstruction) {
  2302     tty->cr();
  2303     InstructionPrinter ip;
  2304     ip.print_instr(_block); tty->cr();
  2305     ip.print_stack(_block->state()); tty->cr();
  2306     ip.print_inline_level(_block);
  2307     ip.print_head();
  2308     tty->print_cr("locals size: %d stack size: %d", state()->locals_size(), state()->stack_size());
  2309   }
  2310 #endif
  2311   _skip_block = false;
  2312   assert(state() != NULL, "ValueStack missing!");
  2313   ciBytecodeStream s(method());
  2314   s.reset_to_bci(bci);
  2315   int prev_bci = bci;
  2316   scope_data()->set_stream(&s);
  2317   // iterate
  2318   Bytecodes::Code code = Bytecodes::_illegal;
  2319   bool push_exception = false;
  2321   if (block()->is_set(BlockBegin::exception_entry_flag) && block()->next() == NULL) {
  2322     // first thing in the exception entry block should be the exception object.
  2323     push_exception = true;
  2324   }
  2325
  2326   while (!bailed_out() && last()->as_BlockEnd() == NULL &&
  2327          (code = stream()->next()) != ciBytecodeStream::EOBC() &&
  2328          (block_at(s.cur_bci()) == NULL || block_at(s.cur_bci()) == block())) {
  2329     assert(state()->kind() == ValueStack::Parsing, "invalid state kind");
  2331     // Check for active jsr during OSR compilation
  2332     if (compilation()->is_osr_compile()
  2333         && scope()->is_top_scope()
  2334         && parsing_jsr()
  2335         && s.cur_bci() == compilation()->osr_bci()) {
  2336       bailout("OSR not supported while a jsr is active");
  2337     }
  2338
  2339     if (push_exception) {
  2340       apush(append(new ExceptionObject()));
  2341       push_exception = false;
  2342     }
  2343
  2344     // handle bytecode
  2345     switch (code) {
  2346       case Bytecodes::_nop            : /* nothing to do */ break;
  2347       case Bytecodes::_aconst_null    : apush(append(new Constant(objectNull            ))); break;
  2348       case Bytecodes::_iconst_m1      : ipush(append(new Constant(new IntConstant   (-1)))); break;
  2349       case Bytecodes::_iconst_0       : ipush(append(new Constant(intZero               ))); break;
  2350       case Bytecodes::_iconst_1       : ipush(append(new Constant(intOne                ))); break;
  2351       case Bytecodes::_iconst_2       : ipush(append(new Constant(new IntConstant   ( 2)))); break;
  2352       case Bytecodes::_iconst_3       : ipush(append(new Constant(new IntConstant   ( 3)))); break;
  2353       case Bytecodes::_iconst_4       : ipush(append(new Constant(new IntConstant   ( 4)))); break;
  2354       case Bytecodes::_iconst_5       : ipush(append(new Constant(new IntConstant   ( 5)))); break;
  2355       case Bytecodes::_lconst_0       : lpush(append(new Constant(new LongConstant  ( 0)))); break;
  2356       case Bytecodes::_lconst_1       : lpush(append(new Constant(new LongConstant  ( 1)))); break;
  2357       case Bytecodes::_fconst_0       : fpush(append(new Constant(new FloatConstant ( 0)))); break;
  2358       case Bytecodes::_fconst_1       : fpush(append(new Constant(new FloatConstant ( 1)))); break;
  2359       case Bytecodes::_fconst_2       : fpush(append(new Constant(new FloatConstant ( 2)))); break;
  2360       case Bytecodes::_dconst_0       : dpush(append(new Constant(new DoubleConstant( 0)))); break;
  2361       case Bytecodes::_dconst_1       : dpush(append(new Constant(new DoubleConstant( 1)))); break;
  2362       case Bytecodes::_bipush         : ipush(append(new Constant(new IntConstant(((signed char*)s.cur_bcp())[1])))); break;
  2363       case Bytecodes::_sipush         : ipush(append(new Constant(new IntConstant((short)Bytes::get_Java_u2(s.cur_bcp()+1))))); break;
  2364       case Bytecodes::_ldc            : // fall through
  2365       case Bytecodes::_ldc_w          : // fall through
  2366       case Bytecodes::_ldc2_w         : load_constant(); break;
  2367       case Bytecodes::_iload          : load_local(intType     , s.get_index()); break;
  2368       case Bytecodes::_lload          : load_local(longType    , s.get_index()); break;
  2369       case Bytecodes::_fload          : load_local(floatType   , s.get_index()); break;
  2370       case Bytecodes::_dload          : load_local(doubleType  , s.get_index()); break;
  2371       case Bytecodes::_aload          : load_local(instanceType, s.get_index()); break;
  2372       case Bytecodes::_iload_0        : load_local(intType   , 0); break;
  2373       case Bytecodes::_iload_1        : load_local(intType   , 1); break;
  2374       case Bytecodes::_iload_2        : load_local(intType   , 2); break;
  2375       case Bytecodes::_iload_3        : load_local(intType   , 3); break;
  2376       case Bytecodes::_lload_0        : load_local(longType  , 0); break;
  2377       case Bytecodes::_lload_1        : load_local(longType  , 1); break;
  2378       case Bytecodes::_lload_2        : load_local(longType  , 2); break;
  2379       case Bytecodes::_lload_3        : load_local(longType  , 3); break;
  2380       case Bytecodes::_fload_0        : load_local(floatType , 0); break;
  2381       case Bytecodes::_fload_1        : load_local(floatType , 1); break;
  2382       case Bytecodes::_fload_2        : load_local(floatType , 2); break;
  2383       case Bytecodes::_fload_3        : load_local(floatType , 3); break;
  2384       case Bytecodes::_dload_0        : load_local(doubleType, 0); break;
  2385       case Bytecodes::_dload_1        : load_local(doubleType, 1); break;
  2386       case Bytecodes::_dload_2        : load_local(doubleType, 2); break;
  2387       case Bytecodes::_dload_3        : load_local(doubleType, 3); break;
  2388       case Bytecodes::_aload_0        : load_local(objectType, 0); break;
  2389       case Bytecodes::_aload_1        : load_local(objectType, 1); break;
  2390       case Bytecodes::_aload_2        : load_local(objectType, 2); break;
  2391       case Bytecodes::_aload_3        : load_local(objectType, 3); break;
  2392       case Bytecodes::_iaload         : load_indexed(T_INT   ); break;
  2393       case Bytecodes::_laload         : load_indexed(T_LONG  ); break;
  2394       case Bytecodes::_faload         : load_indexed(T_FLOAT ); break;
  2395       case Bytecodes::_daload         : load_indexed(T_DOUBLE); break;
  2396       case Bytecodes::_aaload         : load_indexed(T_OBJECT); break;
  2397       case Bytecodes::_baload         : load_indexed(T_BYTE  ); break;
  2398       case Bytecodes::_caload         : load_indexed(T_CHAR  ); break;
  2399       case Bytecodes::_saload         : load_indexed(T_SHORT ); break;
  2400       case Bytecodes::_istore         : store_local(intType   , s.get_index()); break;
  2401       case Bytecodes::_lstore         : store_local(longType  , s.get_index()); break;
  2402       case Bytecodes::_fstore         : store_local(floatType , s.get_index()); break;
  2403       case Bytecodes::_dstore         : store_local(doubleType, s.get_index()); break;
  2404       case Bytecodes::_astore         : store_local(objectType, s.get_index()); break;
  2405       case Bytecodes::_istore_0       : store_local(intType   , 0); break;
  2406       case Bytecodes::_istore_1       : store_local(intType   , 1); break;
  2407       case Bytecodes::_istore_2       : store_local(intType   , 2); break;
  2408       case Bytecodes::_istore_3       : store_local(intType   , 3); break;
  2409       case Bytecodes::_lstore_0       : store_local(longType  , 0); break;
  2410       case Bytecodes::_lstore_1       : store_local(longType  , 1); break;
  2411       case Bytecodes::_lstore_2       : store_local(longType  , 2); break;
  2412       case Bytecodes::_lstore_3       : store_local(longType  , 3); break;
  2413       case Bytecodes::_fstore_0       : store_local(floatType , 0); break;
  2414       case Bytecodes::_fstore_1       : store_local(floatType , 1); break;
  2415       case Bytecodes::_fstore_2       : store_local(floatType , 2); break;
  2416       case Bytecodes::_fstore_3       : store_local(floatType , 3); break;
  2417       case Bytecodes::_dstore_0       : store_local(doubleType, 0); break;
  2418       case Bytecodes::_dstore_1       : store_local(doubleType, 1); break;
  2419       case Bytecodes::_dstore_2       : store_local(doubleType, 2); break;
  2420       case Bytecodes::_dstore_3       : store_local(doubleType, 3); break;
  2421       case Bytecodes::_astore_0       : store_local(objectType, 0); break;
  2422       case Bytecodes::_astore_1       : store_local(objectType, 1); break;
  2423       case Bytecodes::_astore_2       : store_local(objectType, 2); break;
  2424       case Bytecodes::_astore_3       : store_local(objectType, 3); break;
  2425       case Bytecodes::_iastore        : store_indexed(T_INT   ); break;
  2426       case Bytecodes::_lastore        : store_indexed(T_LONG  ); break;
  2427       case Bytecodes::_fastore        : store_indexed(T_FLOAT ); break;
  2428       case Bytecodes::_dastore        : store_indexed(T_DOUBLE); break;
  2429       case Bytecodes::_aastore        : store_indexed(T_OBJECT); break;
  2430       case Bytecodes::_bastore        : store_indexed(T_BYTE  ); break;
  2431       case Bytecodes::_castore        : store_indexed(T_CHAR  ); break;
  2432       case Bytecodes::_sastore        : store_indexed(T_SHORT ); break;
  2433       case Bytecodes::_pop            : // fall through
  2434       case Bytecodes::_pop2           : // fall through
  2435       case Bytecodes::_dup            : // fall through
  2436       case Bytecodes::_dup_x1         : // fall through
  2437       case Bytecodes::_dup_x2         : // fall through
  2438       case Bytecodes::_dup2           : // fall through
  2439       case Bytecodes::_dup2_x1        : // fall through
  2440       case Bytecodes::_dup2_x2        : // fall through
  2441       case Bytecodes::_swap           : stack_op(code); break;
  2442       case Bytecodes::_iadd           : arithmetic_op(intType   , code); break;
  2443       case Bytecodes::_ladd           : arithmetic_op(longType  , code); break;
  2444       case Bytecodes::_fadd           : arithmetic_op(floatType , code); break;
  2445       case Bytecodes::_dadd           : arithmetic_op(doubleType, code); break;
  2446       case Bytecodes::_isub           : arithmetic_op(intType   , code); break;
  2447       case Bytecodes::_lsub           : arithmetic_op(longType  , code); break;
  2448       case Bytecodes::_fsub           : arithmetic_op(floatType , code); break;
  2449       case Bytecodes::_dsub           : arithmetic_op(doubleType, code); break;
  2450       case Bytecodes::_imul           : arithmetic_op(intType   , code); break;
  2451       case Bytecodes::_lmul           : arithmetic_op(longType  , code); break;
  2452       case Bytecodes::_fmul           : arithmetic_op(floatType , code); break;
  2453       case Bytecodes::_dmul           : arithmetic_op(doubleType, code); break;
  2454       case Bytecodes::_idiv           : arithmetic_op(intType   , code, copy_state_for_exception()); break;
  2455       case Bytecodes::_ldiv           : arithmetic_op(longType  , code, copy_state_for_exception()); break;
  2456       case Bytecodes::_fdiv           : arithmetic_op(floatType , code); break;
  2457       case Bytecodes::_ddiv           : arithmetic_op(doubleType, code); break;
  2458       case Bytecodes::_irem           : arithmetic_op(intType   , code, copy_state_for_exception()); break;
  2459       case Bytecodes::_lrem           : arithmetic_op(longType  , code, copy_state_for_exception()); break;
  2460       case Bytecodes::_frem           : arithmetic_op(floatType , code); break;
  2461       case Bytecodes::_drem           : arithmetic_op(doubleType, code); break;
  2462       case Bytecodes::_ineg           : negate_op(intType   ); break;
  2463       case Bytecodes::_lneg           : negate_op(longType  ); break;
  2464       case Bytecodes::_fneg           : negate_op(floatType ); break;
  2465       case Bytecodes::_dneg           : negate_op(doubleType); break;
  2466       case Bytecodes::_ishl           : shift_op(intType , code); break;
  2467       case Bytecodes::_lshl           : shift_op(longType, code); break;
  2468       case Bytecodes::_ishr           : shift_op(intType , code); break;
  2469       case Bytecodes::_lshr           : shift_op(longType, code); break;
  2470       case Bytecodes::_iushr          : shift_op(intType , code); break;
  2471       case Bytecodes::_lushr          : shift_op(longType, code); break;
  2472       case Bytecodes::_iand           : logic_op(intType , code); break;
  2473       case Bytecodes::_land           : logic_op(longType, code); break;
  2474       case Bytecodes::_ior            : logic_op(intType , code); break;
  2475       case Bytecodes::_lor            : logic_op(longType, code); break;
  2476       case Bytecodes::_ixor           : logic_op(intType , code); break;
  2477       case Bytecodes::_lxor           : logic_op(longType, code); break;
  2478       case Bytecodes::_iinc           : increment(); break;
  2479       case Bytecodes::_i2l            : convert(code, T_INT   , T_LONG  ); break;
  2480       case Bytecodes::_i2f            : convert(code, T_INT   , T_FLOAT ); break;
  2481       case Bytecodes::_i2d            : convert(code, T_INT   , T_DOUBLE); break;
  2482       case Bytecodes::_l2i            : convert(code, T_LONG  , T_INT   ); break;
  2483       case Bytecodes::_l2f            : convert(code, T_LONG  , T_FLOAT ); break;
  2484       case Bytecodes::_l2d            : convert(code, T_LONG  , T_DOUBLE); break;
  2485       case Bytecodes::_f2i            : convert(code, T_FLOAT , T_INT   ); break;
  2486       case Bytecodes::_f2l            : convert(code, T_FLOAT , T_LONG  ); break;
  2487       case Bytecodes::_f2d            : convert(code, T_FLOAT , T_DOUBLE); break;
  2488       case Bytecodes::_d2i            : convert(code, T_DOUBLE, T_INT   ); break;
  2489       case Bytecodes::_d2l            : convert(code, T_DOUBLE, T_LONG  ); break;
  2490       case Bytecodes::_d2f            : convert(code, T_DOUBLE, T_FLOAT ); break;
  2491       case Bytecodes::_i2b            : convert(code, T_INT   , T_BYTE  ); break;
  2492       case Bytecodes::_i2c            : convert(code, T_INT   , T_CHAR  ); break;
  2493       case Bytecodes::_i2s            : convert(code, T_INT   , T_SHORT ); break;
  2494       case Bytecodes::_lcmp           : compare_op(longType  , code); break;
  2495       case Bytecodes::_fcmpl          : compare_op(floatType , code); break;
  2496       case Bytecodes::_fcmpg          : compare_op(floatType , code); break;
  2497       case Bytecodes::_dcmpl          : compare_op(doubleType, code); break;
  2498       case Bytecodes::_dcmpg          : compare_op(doubleType, code); break;
  2499       case Bytecodes::_ifeq           : if_zero(intType   , If::eql); break;
  2500       case Bytecodes::_ifne           : if_zero(intType   , If::neq); break;
  2501       case Bytecodes::_iflt           : if_zero(intType   , If::lss); break;
  2502       case Bytecodes::_ifge           : if_zero(intType   , If::geq); break;
  2503       case Bytecodes::_ifgt           : if_zero(intType   , If::gtr); break;
  2504       case Bytecodes::_ifle           : if_zero(intType   , If::leq); break;
  2505       case Bytecodes::_if_icmpeq      : if_same(intType   , If::eql); break;
  2506       case Bytecodes::_if_icmpne      : if_same(intType   , If::neq); break;
  2507       case Bytecodes::_if_icmplt      : if_same(intType   , If::lss); break;
  2508       case Bytecodes::_if_icmpge      : if_same(intType   , If::geq); break;
  2509       case Bytecodes::_if_icmpgt      : if_same(intType   , If::gtr); break;
  2510       case Bytecodes::_if_icmple      : if_same(intType   , If::leq); break;
  2511       case Bytecodes::_if_acmpeq      : if_same(objectType, If::eql); break;
  2512       case Bytecodes::_if_acmpne      : if_same(objectType, If::neq); break;
  2513       case Bytecodes::_goto           : _goto(s.cur_bci(), s.get_dest()); break;
  2514       case Bytecodes::_jsr            : jsr(s.get_dest()); break;
  2515       case Bytecodes::_ret            : ret(s.get_index()); break;
  2516       case Bytecodes::_tableswitch    : table_switch(); break;
  2517       case Bytecodes::_lookupswitch   : lookup_switch(); break;
  2518       case Bytecodes::_ireturn        : method_return(ipop()); break;
  2519       case Bytecodes::_lreturn        : method_return(lpop()); break;
  2520       case Bytecodes::_freturn        : method_return(fpop()); break;
  2521       case Bytecodes::_dreturn        : method_return(dpop()); break;
  2522       case Bytecodes::_areturn        : method_return(apop()); break;
  2523       case Bytecodes::_return         : method_return(NULL  ); break;
  2524       case Bytecodes::_getstatic      : // fall through
  2525       case Bytecodes::_putstatic      : // fall through
  2526       case Bytecodes::_getfield       : // fall through
  2527       case Bytecodes::_putfield       : access_field(code); break;
  2528       case Bytecodes::_invokevirtual  : // fall through
  2529       case Bytecodes::_invokespecial  : // fall through
  2530       case Bytecodes::_invokestatic   : // fall through
  2531       case Bytecodes::_invokedynamic  : // fall through
  2532       case Bytecodes::_invokeinterface: invoke(code); break;
  2533       case Bytecodes::_new            : new_instance(s.get_index_u2()); break;
  2534       case Bytecodes::_newarray       : new_type_array(); break;
  2535       case Bytecodes::_anewarray      : new_object_array(); break;
  2536       case Bytecodes::_arraylength    : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
  2537       case Bytecodes::_athrow         : throw_op(s.cur_bci()); break;
  2538       case Bytecodes::_checkcast      : check_cast(s.get_index_u2()); break;
  2539       case Bytecodes::_instanceof     : instance_of(s.get_index_u2()); break;
  2540       case Bytecodes::_monitorenter   : monitorenter(apop(), s.cur_bci()); break;
  2541       case Bytecodes::_monitorexit    : monitorexit (apop(), s.cur_bci()); break;
  2542       case Bytecodes::_wide           : ShouldNotReachHere(); break;
  2543       case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break;
  2544       case Bytecodes::_ifnull         : if_null(objectType, If::eql); break;
  2545       case Bytecodes::_ifnonnull      : if_null(objectType, If::neq); break;
  2546       case Bytecodes::_goto_w         : _goto(s.cur_bci(), s.get_far_dest()); break;
  2547       case Bytecodes::_jsr_w          : jsr(s.get_far_dest()); break;
  2548       case Bytecodes::_breakpoint     : BAILOUT_("concurrent setting of breakpoint", NULL);
  2549       default                         : ShouldNotReachHere(); break;
  2550     }
  2551     // save current bci to set up the Goto at the end
  2552     prev_bci = s.cur_bci();
  2553   }
  2554   CHECK_BAILOUT_(NULL);
  2555   // stop processing of this block (see try_inline_full)
  2556   if (_skip_block) {
  2557     _skip_block = false;
  2558     assert(_last && _last->as_BlockEnd(), "");
  2559     return _last->as_BlockEnd();
  2560   }
  2561   // check whether the last appended instruction is a BlockEnd instruction
  2562   BlockEnd* end = last()->as_BlockEnd();
  2563   if (end == NULL) {
  2564     // all blocks must end with a BlockEnd instruction => add a Goto
  2565     end = new Goto(block_at(s.cur_bci()), false);
  2566     append(end);
  2567   }
  2568   assert(end == last()->as_BlockEnd(), "inconsistency");
  2570   assert(end->state() != NULL, "state must already be present");
  2571   assert(end->as_Return() == NULL || end->as_Throw() == NULL || end->state()->stack_size() == 0, "stack not needed for return and throw");
  2573   // connect to begin & set state
  2574   // NOTE that inlining may have changed the block we are parsing
  2575   block()->set_end(end);
  2576   // propagate state
  2577   for (int i = end->number_of_sux() - 1; i >= 0; i--) {
  2578     BlockBegin* sux = end->sux_at(i);
  2579     assert(sux->is_predecessor(block()), "predecessor missing");
  2580     // be careful, bailout if bytecodes are strange
  2581     if (!sux->try_merge(end->state())) BAILOUT_("block join failed", NULL);
  2582     scope_data()->add_to_work_list(end->sux_at(i));
  2583   }
  2584
  2585   scope_data()->set_stream(NULL);
  2587   // done
  2588   return end;
  2589 }
  2590
  2591
  2592 void GraphBuilder::iterate_all_blocks(bool start_in_current_block_for_inlining) {
  2593   do {
  2594     if (start_in_current_block_for_inlining && !bailed_out()) {
  2595       iterate_bytecodes_for_block(0);
  2596       start_in_current_block_for_inlining = false;
  2597     } else {
  2598       BlockBegin* b;
  2599       while ((b = scope_data()->remove_from_work_list()) != NULL) {
  2600         if (!b->is_set(BlockBegin::was_visited_flag)) {
  2601           if (b->is_set(BlockBegin::osr_entry_flag)) {
  2602             // we're about to parse the osr entry block, so make sure
  2603             // we set up the OSR edge leading into this block so that
  2604             // Phis get set up correctly.
  2605             setup_osr_entry_block();
  2606             // this is no longer the osr entry block, so clear it.
  2607             b->clear(BlockBegin::osr_entry_flag);
  2608           }
  2609           b->set(BlockBegin::was_visited_flag);
  2610           connect_to_end(b);
  2611         }
  2612       }
  2613     }
  2614   } while (!bailed_out() && !scope_data()->is_work_list_empty());
  2615 }
  2616
  2617
  2618 bool GraphBuilder::_can_trap      [Bytecodes::number_of_java_codes];
  2620 void GraphBuilder::initialize() {
  2621   // the following bytecodes are assumed to potentially
  2622   // throw exceptions in compiled code - note that e.g.
  2623   // monitorexit & the return bytecodes do not throw
  2624   // exceptions because successful monitor pairing has already
  2625   // proved that they succeed
  2626   Bytecodes::Code can_trap_list[] =
  2627     { Bytecodes::_ldc
  2628     , Bytecodes::_ldc_w
  2629     , Bytecodes::_ldc2_w
  2630     , Bytecodes::_iaload
  2631     , Bytecodes::_laload
  2632     , Bytecodes::_faload
  2633     , Bytecodes::_daload
  2634     , Bytecodes::_aaload
  2635     , Bytecodes::_baload
  2636     , Bytecodes::_caload
  2637     , Bytecodes::_saload
  2638     , Bytecodes::_iastore
  2639     , Bytecodes::_lastore
  2640     , Bytecodes::_fastore
  2641     , Bytecodes::_dastore
  2642     , Bytecodes::_aastore
  2643     , Bytecodes::_bastore
  2644     , Bytecodes::_castore
  2645     , Bytecodes::_sastore
  2646     , Bytecodes::_idiv
  2647     , Bytecodes::_ldiv
  2648     , Bytecodes::_irem
  2649     , Bytecodes::_lrem
  2650     , Bytecodes::_getstatic
  2651     , Bytecodes::_putstatic
  2652     , Bytecodes::_getfield
  2653     , Bytecodes::_putfield
  2654     , Bytecodes::_invokevirtual
  2655     , Bytecodes::_invokespecial
  2656     , Bytecodes::_invokestatic
  2657     , Bytecodes::_invokedynamic
  2658     , Bytecodes::_invokeinterface
  2659     , Bytecodes::_new
  2660     , Bytecodes::_newarray
  2661     , Bytecodes::_anewarray
  2662     , Bytecodes::_arraylength
  2663     , Bytecodes::_athrow
  2664     , Bytecodes::_checkcast
  2665     , Bytecodes::_instanceof
  2666     , Bytecodes::_monitorenter
  2667     , Bytecodes::_multianewarray
  2668     };
  2670   // initialize the trap table
  2671   for (int i = 0; i < Bytecodes::number_of_java_codes; i++) {
  2672     _can_trap[i] = false;
  2673   }
  2674   // set standard trap info
  2675   for (uint j = 0; j < ARRAY_SIZE(can_trap_list); j++) {
  2676     _can_trap[can_trap_list[j]] = true;
  2677   }
  2678 }
  2681 BlockBegin* GraphBuilder::header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state) {
  2682   assert(entry->is_set(f), "entry/flag mismatch");
  2683   // create header block
  2684   BlockBegin* h = new BlockBegin(entry->bci());
  2685   h->set_depth_first_number(0);
  2687   Value l = h;
  2688   BlockEnd* g = new Goto(entry, false);
  2689   l->set_next(g, entry->bci());
  2690   h->set_end(g);
  2691   h->set(f);
  2692   // setup header block end state
  2693   ValueStack* s = state->copy(ValueStack::StateAfter, entry->bci()); // can use copy since stack is empty (=> no phis)
  2694   assert(s->stack_is_empty(), "must have empty stack at entry point");
  2695   g->set_state(s);
  2696   return h;
  2697 }
  2701 BlockBegin* GraphBuilder::setup_start_block(int osr_bci, BlockBegin* std_entry, BlockBegin* osr_entry, ValueStack* state) {
  2702   BlockBegin* start = new BlockBegin(0);
  2704   // This code eliminates the empty start block at the beginning of
  2705   // each method.  Previously, each method started with the
  2706   // start-block created below, and this block was followed by the
  2707   // header block that was always empty.  This header block is only
  2708   // necessary if std_entry is also a backward branch target because
  2709   // then phi functions may be necessary in the header block.  It's
  2710   // also necessary when profiling so that there's a single block that
  2711   // can increment the interpreter_invocation_count.
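         // Illustrative shape (editor's sketch, not from the source): when a
         // header is needed the graph becomes
         //   start -> Base -> new_header_block --Goto--> std_entry
         // so backward branches still target std_entry (where any phis live),
         // while the header stays a single-predecessor spot in which to bump
         // the interpreter_invocation_count.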
  2712   BlockBegin* new_header_block;
  2713   if (std_entry->number_of_preds() > 0 || count_invocations() || count_backedges()) {
  2714     new_header_block = header_block(std_entry, BlockBegin::std_entry_flag, state);
  2715   } else {
  2716     new_header_block = std_entry;
  2717   }
  2719   // setup start block (root for the IR graph)
  2720   Base* base =
  2721     new Base(
  2722       new_header_block,
  2723       osr_entry
  2724     );
  2725   start->set_next(base, 0);
  2726   start->set_end(base);
  2727   // create & setup state for start block
  2728   start->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));
  2729   base->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));
  2731   if (base->std_entry()->state() == NULL) {
  2732     // setup states for header blocks
  2733     base->std_entry()->merge(state);
  2734   }
  2736   assert(base->std_entry()->state() != NULL, "");
  2737   return start;
  2738 }
  2741 void GraphBuilder::setup_osr_entry_block() {
  2742   assert(compilation()->is_osr_compile(), "only for osrs");
  2744   int osr_bci = compilation()->osr_bci();
  2745   ciBytecodeStream s(method());
  2746   s.reset_to_bci(osr_bci);
  2747   s.next();
  2748   scope_data()->set_stream(&s);
  2750   // create a new block to be the osr setup code
  2751   _osr_entry = new BlockBegin(osr_bci);
  2752   _osr_entry->set(BlockBegin::osr_entry_flag);
  2753   _osr_entry->set_depth_first_number(0);
  2754   BlockBegin* target = bci2block()->at(osr_bci);
  2755   assert(target != NULL && target->is_set(BlockBegin::osr_entry_flag), "must be there");
  2756   // the osr entry has no values for locals
  2757   ValueStack* state = target->state()->copy();
  2758   _osr_entry->set_state(state);
  2760   kill_all();
  2761   _block = _osr_entry;
  2762   _state = _osr_entry->state()->copy();
  2763   assert(_state->bci() == osr_bci, "mismatch");
  2764   _last  = _osr_entry;
  2765   Value e = append(new OsrEntry());
  2766   e->set_needs_null_check(false);
  2768   // OSR buffer is
  2769   //
  2770   // locals[nlocals-1..0]
  2771   // monitors[number_of_locks-1..0]
  2772   //
  2773   // locals is a direct copy of the interpreter frame, so in the osr buffer
  2774   // the first slot in the local array is the last local from the interpreter
  2775   // and the last slot is local[0] (the receiver) from the interpreter
  2776   //
  2777   // Similarly with locks: the first lock slot in the osr buffer is the last
  2778   // lock from the interpreter frame, and the last lock slot in the osr buffer
  2779   // is the 0th lock in the interpreter frame (the method lock if a sync method)
  2781   // Initialize monitors in the compiled activation.
  2783   int index;
  2784   Value local;
  2786   // find all the locals that the interpreter thinks contain live oops
  2787   const BitMap live_oops = method()->live_local_oops_at_bci(osr_bci);
  2789   // compute the offset into the locals so that we can treat the buffer
  2790   // as if the locals were still in the interpreter frame
  2791   int locals_offset = BytesPerWord * (method()->max_locals() - 1);
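         // Worked example (assumed values, illustration only): with
         // max_locals == 3 and BytesPerWord == 8, locals_offset == 16; a
         // one-slot local at index 0 (the receiver) is then read from buffer
         // offset 16 - 0*8 == 16, index 1 from offset 8, and index 2 from
         // offset 0, i.e. the reversed layout described above.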
  2792   for_each_local_value(state, index, local) {
  2793     int offset = locals_offset - (index + local->type()->size() - 1) * BytesPerWord;
  2794     Value get;
  2795     if (local->type()->is_object_kind() && !live_oops.at(index)) {
  2796       // The interpreter thinks this local is dead, but the compiler
  2797       // doesn't, so pretend that the interpreter passed in null.
  2798       get = append(new Constant(objectNull));
  2799     } else {
  2800       get = append(new UnsafeGetRaw(as_BasicType(local->type()), e,
  2801                                     append(new Constant(new IntConstant(offset))),
  2802                                     0,
  2803                                     true /*unaligned*/, true /*wide*/));
  2804     }
  2805     _state->store_local(index, get);
  2806   }
  2808   // the storage for the OSR buffer is freed manually in the LIRGenerator.
  2810   assert(state->caller_state() == NULL, "should be top scope");
  2811   state->clear_locals();
  2812   Goto* g = new Goto(target, false);
  2813   append(g);
  2814   _osr_entry->set_end(g);
  2815   target->merge(_osr_entry->end()->state());
  2817   scope_data()->set_stream(NULL);
  2818 }
  2821 ValueStack* GraphBuilder::state_at_entry() {
  2822   ValueStack* state = new ValueStack(scope(), NULL);
  2824   // Set up locals for receiver
  2825   int idx = 0;
  2826   if (!method()->is_static()) {
  2827     // we should always see the receiver
  2828     state->store_local(idx, new Local(method()->holder(), objectType, idx));
  2829     idx = 1;
  2830   }
  2832   // Set up locals for incoming arguments
  2833   ciSignature* sig = method()->signature();
  2834   for (int i = 0; i < sig->count(); i++) {
  2835     ciType* type = sig->type_at(i);
  2836     BasicType basic_type = type->basic_type();
  2837     // don't allow T_ARRAY to propagate into local types
  2838     if (basic_type == T_ARRAY) basic_type = T_OBJECT;
  2839     ValueType* vt = as_ValueType(basic_type);
  2840     state->store_local(idx, new Local(type, vt, idx));
  2841     idx += type->size();
  2842   }
  2844   // lock synchronized method
  2845   if (method()->is_synchronized()) {
  2846     state->lock(NULL);
  2847   }
  2849   return state;
  2850 }
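       // Illustrative sketch (assumed example method, not from this source):
       // for an instance method  int m(int a, long b)  state_at_entry()
       // yields local 0 = receiver, local 1 = a, locals 2..3 = b (a long
       // occupies two slots), plus one lock slot if m is synchronized.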
  2853 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
  2854   : _scope_data(NULL)
  2855   , _instruction_count(0)
  2856   , _osr_entry(NULL)
  2857   , _memory(new MemoryBuffer())
  2858   , _compilation(compilation)
  2859   , _inline_bailout_msg(NULL)
  2860 {
  2861   int osr_bci = compilation->osr_bci();
  2863   // determine entry points and bci2block mapping
  2864   BlockListBuilder blm(compilation, scope, osr_bci);
  2865   CHECK_BAILOUT();
  2867   BlockList* bci2block = blm.bci2block();
  2868   BlockBegin* start_block = bci2block->at(0);
  2870   push_root_scope(scope, bci2block, start_block);
  2872   // setup state for std entry
  2873   _initial_state = state_at_entry();
  2874   start_block->merge(_initial_state);
  2876   // complete graph
  2877   _vmap        = new ValueMap();
  2878   switch (scope->method()->intrinsic_id()) {
  2879   case vmIntrinsics::_dabs          : // fall through
  2880   case vmIntrinsics::_dsqrt         : // fall through
  2881   case vmIntrinsics::_dsin          : // fall through
  2882   case vmIntrinsics::_dcos          : // fall through
  2883   case vmIntrinsics::_dtan          : // fall through
  2884   case vmIntrinsics::_dlog          : // fall through
  2885   case vmIntrinsics::_dlog10        : // fall through
  2886     {
  2887       // Compiles where the root method is an intrinsic need a special
  2888       // compilation environment because the bytecodes for the method
  2889       // shouldn't be parsed during the compilation, only the special
  2890       // Intrinsic node should be emitted.  If this isn't done, the
  2891       // code for the inlined version will differ from the root
  2892       // compiled version, which could lead to monotonicity problems
  2893       // on Intel.
  2895       // Set up a stream so that appending instructions works properly.
  2896       ciBytecodeStream s(scope->method());
  2897       s.reset_to_bci(0);
  2898       scope_data()->set_stream(&s);
  2899       s.next();
  2901       // setup the initial block state
  2902       _block = start_block;
  2903       _state = start_block->state()->copy_for_parsing();
  2904       _last  = start_block;
  2905       load_local(doubleType, 0);
  2907       // Emit the intrinsic node.
  2908       bool result = try_inline_intrinsics(scope->method());
  2909       if (!result) BAILOUT("failed to inline intrinsic");
  2910       method_return(dpop());
  2912       // connect the begin and end blocks and we're all done.
  2913       BlockEnd* end = last()->as_BlockEnd();
  2914       block()->set_end(end);
  2915       break;
  2916     }
  2918   case vmIntrinsics::_Reference_get:
  2919     {
  2920       if (UseG1GC) {
  2921         // With java.lang.ref.Reference.get() we must go through the
  2922         // intrinsic - when G1 is enabled - even when get() is the root
  2923         // method of the compile so that, if necessary, the value in
  2924         // the referent field of the reference object gets recorded by
  2925         // the pre-barrier code.
  2926         // Specifically, if G1 is enabled, the value in the referent
  2927         // field is recorded by the G1 SATB pre-barrier. This will
  2928         // result in the referent being marked live and the reference
  2929         // object removed from the list of discovered references during
  2930         // reference processing.
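               // A minimal sketch of the SATB pre-barrier behavior relied on
               // here (pseudocode, names assumed, not the actual barrier code):
               //   oop referent = load(reference, referent_offset);
               //   if (marking_is_active && referent != NULL) {
               //     satb_enqueue(referent);  // keep referent alive for marking
               //   }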
  2932         // Set up a stream so that appending instructions works properly.
  2933         ciBytecodeStream s(scope->method());
  2934         s.reset_to_bci(0);
  2935         scope_data()->set_stream(&s);
  2936         s.next();
  2938         // setup the initial block state
  2939         _block = start_block;
  2940         _state = start_block->state()->copy_for_parsing();
  2941         _last  = start_block;
  2942         load_local(objectType, 0);
  2944         // Emit the intrinsic node.
  2945         bool result = try_inline_intrinsics(scope->method());
  2946         if (!result) BAILOUT("failed to inline intrinsic");
  2947         method_return(apop());
  2949         // connect the begin and end blocks and we're all done.
  2950         BlockEnd* end = last()->as_BlockEnd();
  2951         block()->set_end(end);
  2952         break;
  2953       }
  2954       // Otherwise, fall through
  2955     }
  2957   default:
  2958     scope_data()->add_to_work_list(start_block);
  2959     iterate_all_blocks();
  2960     break;
  2961   }
  2962   CHECK_BAILOUT();
  2964   _start = setup_start_block(osr_bci, start_block, _osr_entry, _initial_state);
  2966   eliminate_redundant_phis(_start);
  2968   NOT_PRODUCT(if (PrintValueNumbering && Verbose) print_stats());
  2969   // for osr compile, bailout if some requirements are not fulfilled
  2970   if (osr_bci != -1) {
  2971     BlockBegin* osr_block = blm.bci2block()->at(osr_bci);
  2972     assert(osr_block->is_set(BlockBegin::was_visited_flag),"osr entry must have been visited for osr compile");
  2974     // check if osr entry point has empty stack - we cannot handle non-empty stacks at osr entry points
  2975     if (!osr_block->state()->stack_is_empty()) {
  2976       BAILOUT("stack not empty at OSR entry point");
  2977     }
  2978   }
  2979 #ifndef PRODUCT
  2980   if (PrintCompilation && Verbose) tty->print_cr("Created %d Instructions", _instruction_count);
  2981 #endif
  2982 }
  2985 ValueStack* GraphBuilder::copy_state_before() {
  2986   return copy_state_before_with_bci(bci());
  2987 }
  2989 ValueStack* GraphBuilder::copy_state_exhandling() {
  2990   return copy_state_exhandling_with_bci(bci());
  2991 }
  2993 ValueStack* GraphBuilder::copy_state_for_exception() {
  2994   return copy_state_for_exception_with_bci(bci());
  2995 }
  2997 ValueStack* GraphBuilder::copy_state_before_with_bci(int bci) {
  2998   return state()->copy(ValueStack::StateBefore, bci);
  2999 }
  3001 ValueStack* GraphBuilder::copy_state_exhandling_with_bci(int bci) {
  3002   if (!has_handler()) return NULL;
  3003   return state()->copy(ValueStack::StateBefore, bci);
  3004 }
  3006 ValueStack* GraphBuilder::copy_state_for_exception_with_bci(int bci) {
  3007   ValueStack* s = copy_state_exhandling_with_bci(bci);
  3008   if (s == NULL) {
  3009     if (_compilation->env()->jvmti_can_access_local_variables()) {
  3010       s = state()->copy(ValueStack::ExceptionState, bci);
  3011     } else {
  3012       s = state()->copy(ValueStack::EmptyExceptionState, bci);
  3013     }
  3014   }
  3015   return s;
  3016 }
  3018 int GraphBuilder::recursive_inline_level(ciMethod* cur_callee) const {
  3019   int recur_level = 0;
  3020   for (IRScope* s = scope(); s != NULL; s = s->caller()) {
  3021     if (s->method() == cur_callee) {
  3022       ++recur_level;
  3023     }
  3024   }
  3025   return recur_level;
  3026 }
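       // Hypothetical example: if a() has inlined b(), and b is now considering
       // inlining a() again, the scope chain is {b, a}; the walk finds one
       // activation of a, so recursive_inline_level(a) returns 1, which
       // MaxRecursiveInlineLevel then bounds.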
  3029 bool GraphBuilder::try_inline(ciMethod* callee, bool holder_known) {
  3030   // Clear out any existing inline bailout condition
  3031   clear_inline_bailout();
  3033   if (callee->should_exclude()) {
  3034     // callee is excluded
  3035     INLINE_BAILOUT("excluded by CompilerOracle")
  3036   } else if (callee->should_not_inline()) {
  3037     // callee must not be inlined
  3038     INLINE_BAILOUT("disallowed by CompilerOracle")
  3039   } else if (!callee->can_be_compiled()) {
  3040     // callee is not compilable (probably has breakpoints)
  3041     INLINE_BAILOUT("not compilable")
  3042   } else if (callee->intrinsic_id() != vmIntrinsics::_none && try_inline_intrinsics(callee)) {
  3043     // intrinsics can be native or not
  3044     return true;
  3045   } else if (callee->is_native()) {
  3046     // non-intrinsic natives cannot be inlined
  3047     INLINE_BAILOUT("non-intrinsic native")
  3048   } else if (callee->is_abstract()) {
  3049     INLINE_BAILOUT("abstract")
  3050   } else {
  3051     return try_inline_full(callee, holder_known);
  3052   }
  3053 }
  3056 bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
  3057   if (!InlineNatives           ) INLINE_BAILOUT("intrinsic method inlining disabled");
  3058   if (callee->is_synchronized()) {
  3059     // We don't currently support any synchronized intrinsics
  3060     return false;
  3061   }
  3063   // callee seems like a good candidate
  3064   // determine id
  3065   bool preserves_state = false;
  3066   bool cantrap = true;
  3067   vmIntrinsics::ID id = callee->intrinsic_id();
  3068   switch (id) {
  3069     case vmIntrinsics::_arraycopy     :
  3070       if (!InlineArrayCopy) return false;
  3071       break;
  3073     case vmIntrinsics::_currentTimeMillis:
  3074     case vmIntrinsics::_nanoTime:
  3075       preserves_state = true;
  3076       cantrap = false;
  3077       break;
  3079     case vmIntrinsics::_floatToRawIntBits   :
  3080     case vmIntrinsics::_intBitsToFloat      :
  3081     case vmIntrinsics::_doubleToRawLongBits :
  3082     case vmIntrinsics::_longBitsToDouble    :
  3083       if (!InlineMathNatives) return false;
  3084       preserves_state = true;
  3085       cantrap = false;
  3086       break;
  3088     case vmIntrinsics::_getClass      :
  3089       if (!InlineClassNatives) return false;
  3090       preserves_state = true;
  3091       break;
  3093     case vmIntrinsics::_currentThread :
  3094       if (!InlineThreadNatives) return false;
  3095       preserves_state = true;
  3096       cantrap = false;
  3097       break;
  3099     case vmIntrinsics::_dabs          : // fall through
  3100     case vmIntrinsics::_dsqrt         : // fall through
  3101     case vmIntrinsics::_dsin          : // fall through
  3102     case vmIntrinsics::_dcos          : // fall through
  3103     case vmIntrinsics::_dtan          : // fall through
  3104     case vmIntrinsics::_dlog          : // fall through
  3105     case vmIntrinsics::_dlog10        : // fall through
  3106       if (!InlineMathNatives) return false;
  3107       cantrap = false;
  3108       preserves_state = true;
  3109       break;
  3111     // sun/misc/AtomicLong.attemptUpdate
  3112     case vmIntrinsics::_attemptUpdate :
  3113       if (!VM_Version::supports_cx8()) return false;
  3114       if (!InlineAtomicLong) return false;
  3115       preserves_state = true;
  3116       break;
  3118     // Use special nodes for Unsafe instructions so we can more easily
  3119     // perform an address-mode optimization on the raw variants
  3120     case vmIntrinsics::_getObject : return append_unsafe_get_obj(callee, T_OBJECT,  false);
  3121     case vmIntrinsics::_getBoolean: return append_unsafe_get_obj(callee, T_BOOLEAN, false);
  3122     case vmIntrinsics::_getByte   : return append_unsafe_get_obj(callee, T_BYTE,    false);
  3123     case vmIntrinsics::_getShort  : return append_unsafe_get_obj(callee, T_SHORT,   false);
  3124     case vmIntrinsics::_getChar   : return append_unsafe_get_obj(callee, T_CHAR,    false);
  3125     case vmIntrinsics::_getInt    : return append_unsafe_get_obj(callee, T_INT,     false);
  3126     case vmIntrinsics::_getLong   : return append_unsafe_get_obj(callee, T_LONG,    false);
  3127     case vmIntrinsics::_getFloat  : return append_unsafe_get_obj(callee, T_FLOAT,   false);
  3128     case vmIntrinsics::_getDouble : return append_unsafe_get_obj(callee, T_DOUBLE,  false);
  3130     case vmIntrinsics::_putObject : return append_unsafe_put_obj(callee, T_OBJECT,  false);
  3131     case vmIntrinsics::_putBoolean: return append_unsafe_put_obj(callee, T_BOOLEAN, false);
  3132     case vmIntrinsics::_putByte   : return append_unsafe_put_obj(callee, T_BYTE,    false);
  3133     case vmIntrinsics::_putShort  : return append_unsafe_put_obj(callee, T_SHORT,   false);
  3134     case vmIntrinsics::_putChar   : return append_unsafe_put_obj(callee, T_CHAR,    false);
  3135     case vmIntrinsics::_putInt    : return append_unsafe_put_obj(callee, T_INT,     false);
  3136     case vmIntrinsics::_putLong   : return append_unsafe_put_obj(callee, T_LONG,    false);
  3137     case vmIntrinsics::_putFloat  : return append_unsafe_put_obj(callee, T_FLOAT,   false);
  3138     case vmIntrinsics::_putDouble : return append_unsafe_put_obj(callee, T_DOUBLE,  false);
  3140     case vmIntrinsics::_getObjectVolatile : return append_unsafe_get_obj(callee, T_OBJECT,  true);
  3141     case vmIntrinsics::_getBooleanVolatile: return append_unsafe_get_obj(callee, T_BOOLEAN, true);
  3142     case vmIntrinsics::_getByteVolatile   : return append_unsafe_get_obj(callee, T_BYTE,    true);
  3143     case vmIntrinsics::_getShortVolatile  : return append_unsafe_get_obj(callee, T_SHORT,   true);
  3144     case vmIntrinsics::_getCharVolatile   : return append_unsafe_get_obj(callee, T_CHAR,    true);
  3145     case vmIntrinsics::_getIntVolatile    : return append_unsafe_get_obj(callee, T_INT,     true);
  3146     case vmIntrinsics::_getLongVolatile   : return append_unsafe_get_obj(callee, T_LONG,    true);
  3147     case vmIntrinsics::_getFloatVolatile  : return append_unsafe_get_obj(callee, T_FLOAT,   true);
  3148     case vmIntrinsics::_getDoubleVolatile : return append_unsafe_get_obj(callee, T_DOUBLE,  true);
  3150     case vmIntrinsics::_putObjectVolatile : return append_unsafe_put_obj(callee, T_OBJECT,  true);
  3151     case vmIntrinsics::_putBooleanVolatile: return append_unsafe_put_obj(callee, T_BOOLEAN, true);
  3152     case vmIntrinsics::_putByteVolatile   : return append_unsafe_put_obj(callee, T_BYTE,    true);
  3153     case vmIntrinsics::_putShortVolatile  : return append_unsafe_put_obj(callee, T_SHORT,   true);
  3154     case vmIntrinsics::_putCharVolatile   : return append_unsafe_put_obj(callee, T_CHAR,    true);
  3155     case vmIntrinsics::_putIntVolatile    : return append_unsafe_put_obj(callee, T_INT,     true);
  3156     case vmIntrinsics::_putLongVolatile   : return append_unsafe_put_obj(callee, T_LONG,    true);
  3157     case vmIntrinsics::_putFloatVolatile  : return append_unsafe_put_obj(callee, T_FLOAT,   true);
  3158     case vmIntrinsics::_putDoubleVolatile : return append_unsafe_put_obj(callee, T_DOUBLE,  true);
  3160     case vmIntrinsics::_getByte_raw   : return append_unsafe_get_raw(callee, T_BYTE);
  3161     case vmIntrinsics::_getShort_raw  : return append_unsafe_get_raw(callee, T_SHORT);
  3162     case vmIntrinsics::_getChar_raw   : return append_unsafe_get_raw(callee, T_CHAR);
  3163     case vmIntrinsics::_getInt_raw    : return append_unsafe_get_raw(callee, T_INT);
  3164     case vmIntrinsics::_getLong_raw   : return append_unsafe_get_raw(callee, T_LONG);
  3165     case vmIntrinsics::_getFloat_raw  : return append_unsafe_get_raw(callee, T_FLOAT);
  3166     case vmIntrinsics::_getDouble_raw : return append_unsafe_get_raw(callee, T_DOUBLE);
  3168     case vmIntrinsics::_putByte_raw   : return append_unsafe_put_raw(callee, T_BYTE);
  3169     case vmIntrinsics::_putShort_raw  : return append_unsafe_put_raw(callee, T_SHORT);
  3170     case vmIntrinsics::_putChar_raw   : return append_unsafe_put_raw(callee, T_CHAR);
  3171     case vmIntrinsics::_putInt_raw    : return append_unsafe_put_raw(callee, T_INT);
  3172     case vmIntrinsics::_putLong_raw   : return append_unsafe_put_raw(callee, T_LONG);
  3173     case vmIntrinsics::_putFloat_raw  : return append_unsafe_put_raw(callee, T_FLOAT);
  3174     case vmIntrinsics::_putDouble_raw : return append_unsafe_put_raw(callee, T_DOUBLE);
  3176     case vmIntrinsics::_prefetchRead        : return append_unsafe_prefetch(callee, false, false);
  3177     case vmIntrinsics::_prefetchWrite       : return append_unsafe_prefetch(callee, false, true);
  3178     case vmIntrinsics::_prefetchReadStatic  : return append_unsafe_prefetch(callee, true,  false);
  3179     case vmIntrinsics::_prefetchWriteStatic : return append_unsafe_prefetch(callee, true,  true);
  3181     case vmIntrinsics::_checkIndex    :
  3182       if (!InlineNIOCheckIndex) return false;
  3183       preserves_state = true;
  3184       break;
  3185     case vmIntrinsics::_putOrderedObject : return append_unsafe_put_obj(callee, T_OBJECT,  true);
  3186     case vmIntrinsics::_putOrderedInt    : return append_unsafe_put_obj(callee, T_INT,     true);
  3187     case vmIntrinsics::_putOrderedLong   : return append_unsafe_put_obj(callee, T_LONG,    true);
  3189     case vmIntrinsics::_compareAndSwapLong:
  3190       if (!VM_Version::supports_cx8()) return false;
  3191       // fall through
  3192     case vmIntrinsics::_compareAndSwapInt:
  3193     case vmIntrinsics::_compareAndSwapObject:
  3194       append_unsafe_CAS(callee);
  3195       return true;
  3197     case vmIntrinsics::_Reference_get:
  3198       // It is only when G1 is enabled that we absolutely
  3199       // need to use the intrinsic version of Reference.get()
  3200       // so that the value in the referent field, if necessary,
  3201       // can be registered by the pre-barrier code.
  3202       if (!UseG1GC) return false;
  3203       preserves_state = true;
  3204       break;
  3206     default                       : return false; // do not inline
  3207   }
  3208   // create intrinsic node
  3209   const bool has_receiver = !callee->is_static();
  3210   ValueType* result_type = as_ValueType(callee->return_type());
  3211   ValueStack* state_before = copy_state_for_exception();
  3213   Values* args = state()->pop_arguments(callee->arg_size());
  3215   if (is_profiling()) {
  3216     // Don't profile in the special case where the root method
  3217     // is the intrinsic
  3218     if (callee != method()) {
  3219       // Note that we'd collect profile data in this method if we wanted it.
  3220       compilation()->set_would_profile(true);
  3221       if (profile_calls()) {
  3222         Value recv = NULL;
  3223         if (has_receiver) {
  3224           recv = args->at(0);
  3225           null_check(recv);
  3226         }
  3227         profile_call(recv, NULL);
  3228       }
  3229     }
  3230   }
  3232   Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before,
  3233                                     preserves_state, cantrap);
  3234   // append instruction & push result
  3235   Value value = append_split(result);
  3236   if (result_type != voidType) push(result_type, value);
  3238 #ifndef PRODUCT
  3239   // printing
  3240   if (PrintInlining) {
  3241     print_inline_result(callee, true);
  3242   }
  3243 #endif
  3245   // done
  3246   return true;
  3247 }
  3250 bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
  3251   // Introduce a new callee continuation point - all Ret instructions
  3252   // will be replaced with Gotos to this point.
  3253   BlockBegin* cont = block_at(next_bci());
  3254   assert(cont != NULL, "continuation must exist (BlockListBuilder starts a new block after a jsr)");
  3256   // Note: can not assign state to continuation yet, as we have to
  3257   // pick up the state from the Ret instructions.
  3259   // Push callee scope
  3260   push_scope_for_jsr(cont, jsr_dest_bci);
  3262   // Temporarily set up bytecode stream so we can append instructions
  3263   // (only using the bci of this stream)
  3264   scope_data()->set_stream(scope_data()->parent()->stream());
  3266   BlockBegin* jsr_start_block = block_at(jsr_dest_bci);
  3267   assert(jsr_start_block != NULL, "jsr start block must exist");
  3268   assert(!jsr_start_block->is_set(BlockBegin::was_visited_flag), "should not have visited jsr yet");
  3269   Goto* goto_sub = new Goto(jsr_start_block, false);
  3270   // Must copy state to avoid wrong sharing when parsing bytecodes
  3271   assert(jsr_start_block->state() == NULL, "should have fresh jsr starting block");
  3272   jsr_start_block->set_state(copy_state_before_with_bci(jsr_dest_bci));
  3273   append(goto_sub);
  3274   _block->set_end(goto_sub);
  3275   _last = _block = jsr_start_block;
  3277   // Clear out bytecode stream
  3278   scope_data()->set_stream(NULL);
  3280   scope_data()->add_to_work_list(jsr_start_block);
  3282   // Ready to resume parsing in subroutine
  3283   iterate_all_blocks();
  3285   // If we bailed out during parsing, return immediately (this is bad news)
  3286   CHECK_BAILOUT_(false);
  3288   // Detect whether the continuation can actually be reached. If not,
  3289   // it has not had state set by the join() operations in
  3290   // iterate_bytecodes_for_block()/ret() and we should not touch the
  3291   // iteration state. The calling activation of
  3292   // iterate_bytecodes_for_block will then complete normally.
  3293   if (cont->state() != NULL) {
  3294     if (!cont->is_set(BlockBegin::was_visited_flag)) {
  3295       // add continuation to work list instead of parsing it immediately
  3296       scope_data()->parent()->add_to_work_list(cont);
  3297     }
  3298   }
  3300   assert(jsr_continuation() == cont, "continuation must not have changed");
  3301   assert(!jsr_continuation()->is_set(BlockBegin::was_visited_flag) ||
  3302          jsr_continuation()->is_set(BlockBegin::parser_loop_header_flag),
  3303          "continuation can only be visited in case of backward branches");
  3304   assert(_last && _last->as_BlockEnd(), "block must have end");
  3306   // continuation is in work list, so end iteration of current block
  3307   _skip_block = true;
  3308   pop_scope_for_jsr();
  3310   return true;
  3311 }
  3314 // Inline the entry of a synchronized method as a monitor enter and
  3315 // register the exception handler which releases the monitor if an
  3316 // exception is thrown within the callee. Note that the monitor enter
  3317 // cannot throw an exception itself, because the receiver is
  3318 // guaranteed to be non-null by the explicit null check at the
  3319 // beginning of inlining.
  3320 void GraphBuilder::inline_sync_entry(Value lock, BlockBegin* sync_handler) {
  3321   assert(lock != NULL && sync_handler != NULL, "lock or handler missing");
  3323   monitorenter(lock, SynchronizationEntryBCI);
  3324   assert(_last->as_MonitorEnter() != NULL, "monitor enter expected");
  3325   _last->set_needs_null_check(false);
  3327   sync_handler->set(BlockBegin::exception_entry_flag);
  3328   sync_handler->set(BlockBegin::is_on_work_list_flag);
  3330   ciExceptionHandler* desc = new ciExceptionHandler(method()->holder(), 0, method()->code_size(), -1, 0);
  3331   XHandler* h = new XHandler(desc);
  3332   h->set_entry_block(sync_handler);
  3333   scope_data()->xhandlers()->append(h);
  3334   scope_data()->set_has_handler();
  3335 }
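       // For illustration only (editor's sketch): inlining a synchronized
       // callee roughly corresponds to this Java-level expansion, with lock
       // being the receiver, or the class mirror for a static callee:
       //   monitorenter(lock);
       //   try { /* callee body */ }
       //   catch (Throwable t) { monitorexit(lock); throw t; }  // sync_handler
       //   monitorexit(lock);  // on the normal return paths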
  3338 // If an exception is thrown and not handled within an inlined
  3339 // synchronized method, the monitor must be released before the
  3340 // exception is rethrown in the outer scope. Generate the appropriate
  3341 // instructions here.
  3342 void GraphBuilder::fill_sync_handler(Value lock, BlockBegin* sync_handler, bool default_handler) {
  3343   BlockBegin* orig_block = _block;
  3344   ValueStack* orig_state = _state;
  3345   Instruction* orig_last = _last;
  3346   _last = _block = sync_handler;
  3347   _state = sync_handler->state()->copy();
  3349   assert(sync_handler != NULL, "handler missing");
  3350   assert(!sync_handler->is_set(BlockBegin::was_visited_flag), "is visited here");
  3352   assert(lock != NULL || default_handler, "lock or handler missing");
  3354   XHandler* h = scope_data()->xhandlers()->remove_last();
  3355   assert(h->entry_block() == sync_handler, "corrupt list of handlers");
  3357   block()->set(BlockBegin::was_visited_flag);
  3358   Value exception = append_with_bci(new ExceptionObject(), SynchronizationEntryBCI);
  3359   assert(exception->is_pinned(), "must be");
  3361   int bci = SynchronizationEntryBCI;
  3362   if (compilation()->env()->dtrace_method_probes()) {
  3363     // Report exit from inline methods.  We don't have a stream here
  3364     // so pass an explicit bci of SynchronizationEntryBCI.
  3365     Values* args = new Values(1);
  3366     args->push(append_with_bci(new Constant(new ObjectConstant(method())), bci));
  3367     append_with_bci(new RuntimeCall(voidType, "dtrace_method_exit", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), args), bci);
  3368   }
  3370   if (lock) {
  3371     assert(state()->locks_size() > 0 && state()->lock_at(state()->locks_size() - 1) == lock, "lock is missing");
  3372     if (!lock->is_linked()) {
  3373       lock = append_with_bci(lock, bci);
  3374     }
  3376     // exit the monitor in the context of the synchronized method
  3377     monitorexit(lock, bci);
  3379     // exit the context of the synchronized method
  3380     if (!default_handler) {
  3381       pop_scope();
  3382       bci = _state->caller_state()->bci();
  3383       _state = _state->caller_state()->copy_for_parsing();
  3384     }
  3385   }
  3387   // perform the throw as if at the call site
  3388   apush(exception);
  3389   throw_op(bci);
  3391   BlockEnd* end = last()->as_BlockEnd();
  3392   block()->set_end(end);
  3394   _block = orig_block;
  3395   _state = orig_state;
  3396   _last = orig_last;
  3397 }
  3400 bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) {
  3401   assert(!callee->is_native(), "callee must not be native");
  3402   if (CompilationPolicy::policy()->should_not_inline(compilation()->env(), callee)) {
  3403     INLINE_BAILOUT("inlining prohibited by policy");
  3404   }
  3405   // first perform tests of things it's not possible to inline
  3406   if (callee->has_exception_handlers() &&
  3407       !InlineMethodsWithExceptionHandlers) INLINE_BAILOUT("callee has exception handlers");
  3408   if (callee->is_synchronized() &&
  3409       !InlineSynchronizedMethods         ) INLINE_BAILOUT("callee is synchronized");
  3410   if (!callee->holder()->is_initialized()) INLINE_BAILOUT("callee's klass not initialized yet");
  3411   if (!callee->has_balanced_monitors())    INLINE_BAILOUT("callee's monitors do not match");
  3413   // Proper inlining of methods with jsrs requires a little more work.
  3414   if (callee->has_jsrs()                 ) INLINE_BAILOUT("jsrs not handled properly by inliner yet");
  3416   // When SSE2 is used on Intel, no special handling is needed for
  3417   // strictfp: strict_fp_requires_explicit_rounding is a compile-time
  3418   // constant, but UseSSE is not, hence the UseSSE < 2 check here
  3419   if (strict_fp_requires_explicit_rounding && UseSSE < 2 && method()->is_strict() != callee->is_strict()) {
  3420     INLINE_BAILOUT("caller and callee have different strict fp requirements");
  3421   }
  3423   if (is_profiling() && !callee->ensure_method_data()) {
  3424     INLINE_BAILOUT("mdo allocation failed");
  3425   }
  3427   // now perform tests that are based on flag settings
  3428   if (callee->should_inline()) {
  3429     // ignore heuristic controls on inlining
  3430   } else {
  3431     if (inline_level() > MaxInlineLevel                         ) INLINE_BAILOUT("too-deep inlining");
  3432     if (recursive_inline_level(callee) > MaxRecursiveInlineLevel) INLINE_BAILOUT("too-deep recursive inlining");
  3433     if (callee->code_size_for_inlining() > max_inline_size()    ) INLINE_BAILOUT("callee is too large");
  3435     // don't inline throwable methods unless the inlining tree is rooted in a throwable class
  3436     if (callee->name() == ciSymbol::object_initializer_name() &&
  3437         callee->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {
  3438       // Throwable constructor call
  3439       IRScope* top = scope();
  3440       while (top->caller() != NULL) {
  3441         top = top->caller();
  3442       }
  3443       if (!top->method()->holder()->is_subclass_of(ciEnv::current()->Throwable_klass())) {
  3444         INLINE_BAILOUT("don't inline Throwable constructors");
  3445       }
  3446     }
  3448     if (compilation()->env()->num_inlined_bytecodes() > DesiredMethodLimit) {
  3449       INLINE_BAILOUT("total inlining greater than DesiredMethodLimit");
  3450     }
  3451   }
  3453 #ifndef PRODUCT
  3454   // printing
  3455   if (PrintInlining) {
  3456     print_inline_result(callee, true);
  3457   }
  3458 #endif
  3460   // NOTE: Bailouts from this point on, which occur at the
  3461   // GraphBuilder level, do not cause bailout just of the inlining but
  3462   // in fact of the entire compilation.
  3464   BlockBegin* orig_block = block();
  3466   const int args_base = state()->stack_size() - callee->arg_size();
  3467   assert(args_base >= 0, "stack underflow during inlining");
  3469   // Insert null check if necessary
  3470   Value recv = NULL;
  3471   if (code() != Bytecodes::_invokestatic) {
  3472     // note: null check must happen even if first instruction of callee does
  3473     //       an implicit null check since the callee is in a different scope
  3474     //       and we must make sure exception handling does the right thing
  3475     assert(!callee->is_static(), "callee must not be static");
  3476     assert(callee->arg_size() > 0, "must have at least a receiver");
  3477     recv = state()->stack_at(args_base);
  3478     null_check(recv);
  3479   }
  3481   if (is_profiling()) {
  3482     // Note that we'd collect profile data in this method if we wanted it.
  3483     // this may be redundant here...
  3484     compilation()->set_would_profile(true);
  3486     if (profile_calls()) {
  3487       profile_call(recv, holder_known ? callee->holder() : NULL);
  3488     }
  3489     if (profile_inlined_calls()) {
  3490       profile_invocation(callee, copy_state_before());
  3491     }
  3492   }
  3494   // Introduce a new callee continuation point - if the callee has
  3495   // more than one return instruction or the return does not allow
  3496   // fall-through of control flow, all return instructions of the
  3497   // callee will need to be replaced by Goto's pointing to this
  3498   // continuation point.
  3499   BlockBegin* cont = block_at(next_bci());
  3500   bool continuation_existed = true;
  3501   if (cont == NULL) {
  3502     cont = new BlockBegin(next_bci());
  3503     // low number so that continuation gets parsed as early as possible
  3504     cont->set_depth_first_number(0);
  3505 #ifndef PRODUCT
  3506     if (PrintInitialBlockList) {
  3507       tty->print_cr("CFG: created block %d (bci %d) as continuation for inline at bci %d",
  3508                     cont->block_id(), cont->bci(), bci());
  3509     }
  3510 #endif
  3511     continuation_existed = false;
  3512   }
  3513   // Record number of predecessors of continuation block before
  3514   // inlining, to detect if inlined method has edges to its
  3515   // continuation after inlining.
  3516   int continuation_preds = cont->number_of_preds();
  3518   // Push callee scope
  3519   push_scope(callee, cont);
  3521   // the BlockListBuilder for the callee could have bailed out
  3522   CHECK_BAILOUT_(false);
  3524   // Temporarily set up bytecode stream so we can append instructions
  3525   // (only using the bci of this stream)
  3526   scope_data()->set_stream(scope_data()->parent()->stream());
  3528   // Pass parameters into callee state: add assignments
  3529   // note: this will also ensure that all arguments are computed before being passed
  3530   ValueStack* callee_state = state();
  3531   ValueStack* caller_state = state()->caller_state();
  3532   { int i = args_base;
  3533     while (i < caller_state->stack_size()) {
  3534       const int par_no = i - args_base;
  3535       Value  arg = caller_state->stack_at_inc(i);
  3536       // NOTE: take base() of arg->type() to avoid problems storing
  3537       // constants
  3538       store_local(callee_state, arg, arg->type()->base(), par_no);
  3539     }
  3540   }
  3542   // Remove args from stack.
  3543   // Note that we preserve locals state in case we can use it later
  3544   // (see use of pop_scope() below)
  3545   caller_state->truncate_stack(args_base);
  3546   assert(callee_state->stack_size() == 0, "callee stack must be empty");
  3548   Value lock;
  3549   BlockBegin* sync_handler;
  3551   // Inline the locking of the receiver if the callee is synchronized
  3552   if (callee->is_synchronized()) {
  3553     lock = callee->is_static() ? append(new Constant(new InstanceConstant(callee->holder()->java_mirror())))
  3554                                : state()->local_at(0);
  3555     sync_handler = new BlockBegin(SynchronizationEntryBCI);
  3556     inline_sync_entry(lock, sync_handler);
  3557   }
  3559   if (compilation()->env()->dtrace_method_probes()) {
  3560     Values* args = new Values(1);
  3561     args->push(append(new Constant(new ObjectConstant(method()))));
  3562     append(new RuntimeCall(voidType, "dtrace_method_entry", CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_entry), args));
  3563   }
  3565   BlockBegin* callee_start_block = block_at(0);
  3566   if (callee_start_block != NULL) {
  3567     assert(callee_start_block->is_set(BlockBegin::parser_loop_header_flag), "must be loop header");
  3568     Goto* goto_callee = new Goto(callee_start_block, false);
  3569     // The state for this goto is in the scope of the callee, so use
  3570     // the entry bci for the callee instead of the call site bci.
  3571     append_with_bci(goto_callee, 0);
  3572     _block->set_end(goto_callee);
  3573     callee_start_block->merge(callee_state);
  3575     _last = _block = callee_start_block;
  3577     scope_data()->add_to_work_list(callee_start_block);
  3578   }
  3580   // Clear out bytecode stream
  3581   scope_data()->set_stream(NULL);
  3583   // Ready to resume parsing in callee (either in the same block we
  3584   // were in before or in the callee's start block)
  3585   iterate_all_blocks(callee_start_block == NULL);
  3587   // If we bailed out during parsing, return immediately (this is bad news)
  3588   if (bailed_out()) return false;
  3590   // iterate_all_blocks theoretically traverses in random order; in
  3591   // practice, we have only traversed the continuation if we are
  3592   // inlining into a subroutine
  3593   assert(continuation_existed ||
  3594          !continuation()->is_set(BlockBegin::was_visited_flag),
  3595          "continuation should not have been parsed yet if we created it");
  3597   // If we bailed out during parsing, return immediately (this is bad news)
  3598   CHECK_BAILOUT_(false);
  3600   // At this point we are almost ready to return and resume parsing of
  3601   // the caller back in the GraphBuilder. The only thing we want to do
  3602   // first is an optimization: during parsing of the callee we
  3603   // generated at least one Goto to the continuation block. If we
  3604   // generated exactly one, and if the inlined method spanned exactly
  3605   // one block (and we didn't have to Goto its entry), then we snip
  3606   // off the Goto to the continuation, allowing control to fall
  3607   // through back into the caller block and effectively performing
  3608   // block merging. This allows load elimination and CSE to take place
  3609   // across multiple callee scopes if they are relatively simple, and
  3610   // is currently essential to making inlining profitable.
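         // Sketch of the effect (hypothetical callee, for illustration):
         // inlining  int identity(int x) { return x; }  into  y = identity(7);
         // first produces a single Goto to the continuation; the test below
         // snips that Goto so the caller block simply falls through, and
         // CSE/load elimination can see across the former call boundary.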
  3611   if (   num_returns() == 1
  3612       && block() == orig_block
  3613       && block() == inline_cleanup_block()) {
  3614     _last = inline_cleanup_return_prev();
  3615     _state = inline_cleanup_state();
  3616   } else if (continuation_preds == cont->number_of_preds()) {
  3617     // Inlining has made the instructions after the invoke in the
  3618     // caller unreachable, so skip filling this block with
  3619     // instructions!
  3620     assert (cont == continuation(), "");
  3621     assert(_last && _last->as_BlockEnd(), "");
  3622     _skip_block = true;
  3623   } else {
  3624     // Resume parsing in continuation block unless it was already parsed.
  3625     // Note that if we don't change _last here, iteration in
  3626     // iterate_bytecodes_for_block will stop when we return.
  3627     if (!continuation()->is_set(BlockBegin::was_visited_flag)) {
  3628       // add continuation to work list instead of parsing it immediately
  3629       assert(_last && _last->as_BlockEnd(), "");
  3630       scope_data()->parent()->add_to_work_list(continuation());
  3631       _skip_block = true;
  3632     }
  3633   }
  3635   // Fill the exception handler for synchronized methods with instructions
  3636   if (callee->is_synchronized() && sync_handler->state() != NULL) {
  3637     fill_sync_handler(lock, sync_handler);
  3638   } else {
  3639     pop_scope();
  3640   }
  3642   compilation()->notice_inlined_method(callee);
  3644   return true;
  3645 }
  3648 void GraphBuilder::inline_bailout(const char* msg) {
  3649   assert(msg != NULL, "inline bailout msg must exist");
  3650   _inline_bailout_msg = msg;
  3651 }
  3654 void GraphBuilder::clear_inline_bailout() {
  3655   _inline_bailout_msg = NULL;
  3656 }
  3659 void GraphBuilder::push_root_scope(IRScope* scope, BlockList* bci2block, BlockBegin* start) {
  3660   ScopeData* data = new ScopeData(NULL);
  3661   data->set_scope(scope);
  3662   data->set_bci2block(bci2block);
  3663   _scope_data = data;
  3664   _block = start;
  3665 }
  3668 void GraphBuilder::push_scope(ciMethod* callee, BlockBegin* continuation) {
  3669   IRScope* callee_scope = new IRScope(compilation(), scope(), bci(), callee, -1, false);
  3670   scope()->add_callee(callee_scope);
  3672   BlockListBuilder blb(compilation(), callee_scope, -1);
  3673   CHECK_BAILOUT();
  3675   if (!blb.bci2block()->at(0)->is_set(BlockBegin::parser_loop_header_flag)) {
  3676     // this scope can be inlined directly into the caller so remove
  3677     // the block at bci 0.
  3678     blb.bci2block()->at_put(0, NULL);
  3679   }
  3681   set_state(new ValueStack(callee_scope, state()->copy(ValueStack::CallerState, bci())));
  3683   ScopeData* data = new ScopeData(scope_data());
  3684   data->set_scope(callee_scope);
  3685   data->set_bci2block(blb.bci2block());
  3686   data->set_continuation(continuation);
  3687   _scope_data = data;
  3688 }
  3691 void GraphBuilder::push_scope_for_jsr(BlockBegin* jsr_continuation, int jsr_dest_bci) {
  3692   ScopeData* data = new ScopeData(scope_data());
  3693   data->set_parsing_jsr();
  3694   data->set_jsr_entry_bci(jsr_dest_bci);
  3695   data->set_jsr_return_address_local(-1);
  3696   // Must clone bci2block list as we will be mutating it in order to
  3697   // properly clone all blocks in jsr region as well as exception
  3698   // handlers containing rets
  3699   BlockList* new_bci2block = new BlockList(bci2block()->length());
  3700   new_bci2block->push_all(bci2block());
  3701   data->set_bci2block(new_bci2block);
  3702   data->set_scope(scope());
  3703   data->setup_jsr_xhandlers();
  3704   data->set_continuation(continuation());
  3705   data->set_jsr_continuation(jsr_continuation);
  3706   _scope_data = data;
  3707 }
  3710 void GraphBuilder::pop_scope() {
  3711   int number_of_locks = scope()->number_of_locks();
  3712   _scope_data = scope_data()->parent();
  3713   // accumulate minimum number of monitor slots to be reserved
  3714   scope()->set_min_number_of_locks(number_of_locks);
  3715 }
  3718 void GraphBuilder::pop_scope_for_jsr() {
  3719   _scope_data = scope_data()->parent();
  3720 }
  3722 bool GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {
  3723   if (InlineUnsafeOps) {
  3724     Values* args = state()->pop_arguments(callee->arg_size());
  3725     null_check(args->at(0));
  3726     Instruction* offset = args->at(2);
  3727 #ifndef _LP64
  3728     offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
  3729 #endif
  3730     Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
  3731     push(op->type(), op);
  3732     compilation()->set_has_unsafe_access(true);
  3733   }
  3734   return InlineUnsafeOps;
  3735 }
  3738 bool GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {
  3739   if (InlineUnsafeOps) {
  3740     Values* args = state()->pop_arguments(callee->arg_size());
  3741     null_check(args->at(0));
  3742     Instruction* offset = args->at(2);
  3743 #ifndef _LP64
  3744     offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
  3745 #endif
  3746     Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, args->at(3), is_volatile));
  3747     compilation()->set_has_unsafe_access(true);
  3748     kill_all();
  3749   }
  3750   return InlineUnsafeOps;
  3751 }
  3754 bool GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {
  3755   if (InlineUnsafeOps) {
  3756     Values* args = state()->pop_arguments(callee->arg_size());
  3757     null_check(args->at(0));
  3758     Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
  3759     push(op->type(), op);
  3760     compilation()->set_has_unsafe_access(true);
  3761   }
  3762   return InlineUnsafeOps;
  3763 }
  3766 bool GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
  3767   if (InlineUnsafeOps) {
  3768     Values* args = state()->pop_arguments(callee->arg_size());
  3769     null_check(args->at(0));
  3770     Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
  3771     compilation()->set_has_unsafe_access(true);
  3772   }
  3773   return InlineUnsafeOps;
  3774 }
  3777 bool GraphBuilder::append_unsafe_prefetch(ciMethod* callee, bool is_static, bool is_store) {
  3778   if (InlineUnsafeOps) {
  3779     Values* args = state()->pop_arguments(callee->arg_size());
  3780     int obj_arg_index = 1; // Assume non-static case
  3781     if (is_static) {
  3782       obj_arg_index = 0;
  3783     } else {
  3784       null_check(args->at(0));
  3785     }
  3786     Instruction* offset = args->at(obj_arg_index + 1);
  3787 #ifndef _LP64
  3788     offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
  3789 #endif
  3790     Instruction* op = is_store ? append(new UnsafePrefetchWrite(args->at(obj_arg_index), offset))
  3791                                : append(new UnsafePrefetchRead (args->at(obj_arg_index), offset));
  3792     compilation()->set_has_unsafe_access(true);
  3793   }
  3794   return InlineUnsafeOps;
  3795 }
  3798 void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
  3799   ValueStack* state_before = copy_state_for_exception();
  3800   ValueType* result_type = as_ValueType(callee->return_type());
  3801   assert(result_type->is_int(), "int result");
  3802   Values* args = state()->pop_arguments(callee->arg_size());
  3804   // Pop off some args to handle specially, then push them back
  3805   Value newval = args->pop();
  3806   Value cmpval = args->pop();
  3807   Value offset = args->pop();
  3808   Value src = args->pop();
  3809   Value unsafe_obj = args->pop();
  3811   // Separately handle the unsafe arg. It is not needed for code
  3812   // generation, but must be null checked
  3813   null_check(unsafe_obj);
  3815 #ifndef _LP64
  3816   offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
  3817 #endif
  3819   args->push(src);
  3820   args->push(offset);
  3821   args->push(cmpval);
  3822   args->push(newval);
  3824   // An unsafe CAS can alias with other field accesses, but we don't
  3825   // know which ones, so mark the state as not preserved.  This will
  3826   // cause CSE to invalidate memory across it.
  3827   bool preserves_state = false;
  3828   Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(), args, false, state_before, preserves_state);
  3829   append_split(result);
  3830   push(result_type, result);
  3831   compilation()->set_has_unsafe_access(true);
  3832 }
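       // Java-level call being intrinsified here (illustrative):
       //   boolean ok = unsafe.compareAndSwapInt(obj, offset, expected, update);
       // The five popped values above, bottom of stack first, are: the Unsafe
       // instance (only null-checked), obj (src), offset, expected (cmpval)
       // and update (newval).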
  3835 #ifndef PRODUCT
  3836 void GraphBuilder::print_inline_result(ciMethod* callee, bool res) {
  3837   CompileTask::print_inlining(callee, scope()->level(), bci(), _inline_bailout_msg);
  3838   if (res && CIPrintMethodCodes) {
  3839     callee->print_codes();
  3840   }
  3841 }
  3844 void GraphBuilder::print_stats() {
  3845   vmap()->print();
  3846 }
  3847 #endif // PRODUCT
  3849 void GraphBuilder::profile_call(Value recv, ciKlass* known_holder) {
  3850   append(new ProfileCall(method(), bci(), recv, known_holder));
  3851 }
  3853 void GraphBuilder::profile_invocation(ciMethod* callee, ValueStack* state) {
  3854   append(new ProfileInvoke(callee, state));
  3855 }
