src/share/vm/c1/c1_GraphBuilder.cpp

changeset 2174
f02a8bbe6ed4
parent 2146
3a294e483abc
child 2180
80c9354976b0
     1.1 --- a/src/share/vm/c1/c1_GraphBuilder.cpp	Wed Sep 22 23:51:03 2010 -0700
     1.2 +++ b/src/share/vm/c1/c1_GraphBuilder.cpp	Tue Dec 29 19:08:54 2009 +0100
     1.3 @@ -659,7 +659,6 @@
     1.4    , _jsr_xhandlers(NULL)
     1.5    , _caller_stack_size(-1)
     1.6    , _continuation(NULL)
     1.7 -  , _continuation_state(NULL)
     1.8    , _num_returns(0)
     1.9    , _cleanup_block(NULL)
    1.10    , _cleanup_return_prev(NULL)
    1.11 @@ -795,14 +794,6 @@
    1.12    if (i >= -1) worklist->at_put(i + 1, top);
    1.13  }
    1.14  
    1.15 -int GraphBuilder::ScopeData::caller_stack_size() const {
    1.16 -  ValueStack* state = scope()->caller_state();
    1.17 -  if (state == NULL) {
    1.18 -    return 0;
    1.19 -  }
    1.20 -  return state->stack_size();
    1.21 -}
    1.22 -
    1.23  
    1.24  BlockBegin* GraphBuilder::ScopeData::remove_from_work_list() {
    1.25    if (is_work_list_empty()) {
    1.26 @@ -880,7 +871,7 @@
    1.27          ciObject* obj = con.as_object();
    1.28          if (!obj->is_loaded()
    1.29              || (PatchALot && obj->klass() != ciEnv::current()->String_klass())) {
    1.30 -          patch_state = state()->copy();
    1.31 +          patch_state = copy_state_before();
    1.32            t = new ObjectConstant(obj);
    1.33          } else {
    1.34            assert(!obj->is_klass(), "must be java_mirror of klass");
    1.35 @@ -902,7 +893,8 @@
    1.36  
    1.37  
    1.38  void GraphBuilder::load_local(ValueType* type, int index) {
    1.39 -  Value x = state()->load_local(index);
    1.40 +  Value x = state()->local_at(index);
    1.41 +  assert(x != NULL && !x->type()->is_illegal(), "access of illegal local variable");
    1.42    push(type, x);
    1.43  }
    1.44  
    1.45 @@ -942,19 +934,21 @@
    1.46  
    1.47  
    1.48  void GraphBuilder::load_indexed(BasicType type) {
    1.49 +  ValueStack* state_before = copy_state_for_exception();
    1.50    Value index = ipop();
    1.51    Value array = apop();
    1.52    Value length = NULL;
    1.53    if (CSEArrayLength ||
    1.54        (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
    1.55        (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
    1.56 -    length = append(new ArrayLength(array, lock_stack()));
    1.57 +    length = append(new ArrayLength(array, state_before));
    1.58    }
    1.59 -  push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, lock_stack())));
    1.60 +  push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
    1.61  }
    1.62  
    1.63  
    1.64  void GraphBuilder::store_indexed(BasicType type) {
    1.65 +  ValueStack* state_before = copy_state_for_exception();
    1.66    Value value = pop(as_ValueType(type));
    1.67    Value index = ipop();
    1.68    Value array = apop();
    1.69 @@ -962,9 +956,9 @@
    1.70    if (CSEArrayLength ||
    1.71        (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
    1.72        (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
    1.73 -    length = append(new ArrayLength(array, lock_stack()));
    1.74 +    length = append(new ArrayLength(array, state_before));
    1.75    }
    1.76 -  StoreIndexed* result = new StoreIndexed(array, index, length, type, value, lock_stack());
    1.77 +  StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before);
    1.78    append(result);
    1.79    _memory->store_value(value);
    1.80  
    1.81 @@ -1063,12 +1057,12 @@
    1.82  }
    1.83  
    1.84  
    1.85 -void GraphBuilder::arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* stack) {
    1.86 +void GraphBuilder::arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* state_before) {
    1.87    Value y = pop(type);
    1.88    Value x = pop(type);
    1.89    // NOTE: strictfp can be queried from current method since we don't
    1.90    // inline methods with differing strictfp bits
    1.91 -  Value res = new ArithmeticOp(code, x, y, method()->is_strict(), stack);
    1.92 +  Value res = new ArithmeticOp(code, x, y, method()->is_strict(), state_before);
    1.93    // Note: currently single-precision floating-point rounding on Intel is handled at the LIRGenerator level
    1.94    res = append(res);
    1.95    if (method()->is_strict()) {
    1.96 @@ -1132,7 +1126,7 @@
    1.97  
    1.98  
    1.99  void GraphBuilder::compare_op(ValueType* type, Bytecodes::Code code) {
   1.100 -  ValueStack* state_before = state()->copy();
   1.101 +  ValueStack* state_before = copy_state_before();
   1.102    Value y = pop(type);
   1.103    Value x = pop(type);
   1.104    ipush(append(new CompareOp(code, x, y, state_before)));
   1.105 @@ -1217,7 +1211,7 @@
   1.106  
   1.107  void GraphBuilder::if_zero(ValueType* type, If::Condition cond) {
   1.108    Value y = append(new Constant(intZero));
   1.109 -  ValueStack* state_before = state()->copy();
   1.110 +  ValueStack* state_before = copy_state_before();
   1.111    Value x = ipop();
   1.112    if_node(x, cond, y, state_before);
   1.113  }
   1.114 @@ -1225,14 +1219,14 @@
   1.115  
   1.116  void GraphBuilder::if_null(ValueType* type, If::Condition cond) {
   1.117    Value y = append(new Constant(objectNull));
   1.118 -  ValueStack* state_before = state()->copy();
   1.119 +  ValueStack* state_before = copy_state_before();
   1.120    Value x = apop();
   1.121    if_node(x, cond, y, state_before);
   1.122  }
   1.123  
   1.124  
   1.125  void GraphBuilder::if_same(ValueType* type, If::Condition cond) {
   1.126 -  ValueStack* state_before = state()->copy();
   1.127 +  ValueStack* state_before = copy_state_before();
   1.128    Value y = pop(type);
   1.129    Value x = pop(type);
   1.130    if_node(x, cond, y, state_before);
   1.131 @@ -1282,7 +1276,7 @@
   1.132      BlockBegin* tsux = block_at(bci() + switch_->dest_offset_at(0));
   1.133      BlockBegin* fsux = block_at(bci() + switch_->default_offset());
   1.134      bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
   1.135 -    ValueStack* state_before = is_bb ? state() : NULL;
   1.136 +    ValueStack* state_before = is_bb ? copy_state_before() : NULL;
   1.137      append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));
   1.138    } else {
   1.139      // collect successors
   1.140 @@ -1295,7 +1289,7 @@
   1.141      }
   1.142      // add default successor
   1.143      sux->at_put(i, block_at(bci() + switch_->default_offset()));
   1.144 -    ValueStack* state_before = has_bb ? state() : NULL;
   1.145 +    ValueStack* state_before = has_bb ? copy_state_before() : NULL;
   1.146      append(new TableSwitch(ipop(), sux, switch_->low_key(), state_before, has_bb));
   1.147    }
   1.148  }
   1.149 @@ -1314,7 +1308,7 @@
   1.150      BlockBegin* tsux = block_at(bci() + pair->offset());
   1.151      BlockBegin* fsux = block_at(bci() + switch_->default_offset());
   1.152      bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
   1.153 -    ValueStack* state_before = is_bb ? state() : NULL;
   1.154 +    ValueStack* state_before = is_bb ? copy_state_before() : NULL;
   1.155      append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));
   1.156    } else {
   1.157      // collect successors & keys
   1.158 @@ -1330,7 +1324,7 @@
   1.159      }
   1.160      // add default successor
   1.161      sux->at_put(i, block_at(bci() + switch_->default_offset()));
   1.162 -    ValueStack* state_before = has_bb ? state() : NULL;
   1.163 +    ValueStack* state_before = has_bb ? copy_state_before() : NULL;
   1.164      append(new LookupSwitch(ipop(), sux, keys, state_before, has_bb));
   1.165    }
   1.166  }
   1.167 @@ -1340,7 +1334,7 @@
   1.168    // the registration on return.
   1.169  
   1.170    // Gather some type information about the receiver
   1.171 -  Value receiver = state()->load_local(0);
   1.172 +  Value receiver = state()->local_at(0);
   1.173    assert(receiver != NULL, "must have a receiver");
   1.174    ciType* declared_type = receiver->declared_type();
   1.175    ciType* exact_type = receiver->exact_type();
   1.176 @@ -1373,10 +1367,11 @@
   1.177  
   1.178    if (needs_check) {
   1.179      // Perform the registration of finalizable objects.
   1.180 +    ValueStack* state_before = copy_state_for_exception();
   1.181      load_local(objectType, 0);
   1.182      append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
   1.183                                 state()->pop_arguments(1),
   1.184 -                               true, lock_stack(), true));
   1.185 +                               true, state_before, true));
   1.186    }
   1.187  }
   1.188  
   1.189 @@ -1395,12 +1390,14 @@
   1.190      // If the inlined method is synchronized, the monitor must be
   1.191      // released before we jump to the continuation block.
   1.192      if (method()->is_synchronized()) {
   1.193 -      int i = state()->caller_state()->locks_size();
   1.194 -      assert(state()->locks_size() == i + 1, "receiver must be locked here");
   1.195 -      monitorexit(state()->lock_at(i), SynchronizationEntryBCI);
   1.196 +      assert(state()->locks_size() == 1, "receiver must be locked here");
   1.197 +      monitorexit(state()->lock_at(0), SynchronizationEntryBCI);
   1.198      }
   1.199  
   1.200 -    state()->truncate_stack(caller_stack_size());
   1.201 +    // State at end of inlined method is the state of the caller
   1.202 +    // without the method parameters on stack, including the
   1.203 +    // return value, if any, of the inlined method on operand stack.
   1.204 +    set_state(state()->caller_state()->copy_for_parsing());
   1.205      if (x != NULL) {
   1.206        state()->push(x->type(), x);
   1.207      }
   1.208 @@ -1412,14 +1409,6 @@
   1.209        set_inline_cleanup_info(_block, _last, state());
   1.210      }
   1.211  
   1.212 -    // State at end of inlined method is the state of the caller
   1.213 -    // without the method parameters on stack, including the
   1.214 -    // return value, if any, of the inlined method on operand stack.
   1.215 -    set_state(scope_data()->continuation_state()->copy());
   1.216 -    if (x) {
   1.217 -      state()->push(x->type(), x);
   1.218 -    }
   1.219 -
   1.220      // The current bci() is in the wrong scope, so use the bci() of
   1.221      // the continuation point.
   1.222      append_with_bci(goto_callee, scope_data()->continuation()->bci());
   1.223 @@ -1455,11 +1444,11 @@
   1.224                           field->will_link(method()->holder(), code);
   1.225    const bool is_initialized = is_loaded && holder->is_initialized();
   1.226  
   1.227 -  ValueStack* state_copy = NULL;
   1.228 +  ValueStack* state_before = NULL;
   1.229    if (!is_initialized || PatchALot) {
   1.230      // save state before instruction for debug info when
   1.231      // deoptimization happens during patching
   1.232 -    state_copy = state()->copy();
   1.233 +    state_before = copy_state_before();
   1.234    }
   1.235  
   1.236    Value obj = NULL;
   1.237 @@ -1468,9 +1457,9 @@
   1.238      // fully initialized and resolved in this constant pool.  The will_link test
   1.239      // above essentially checks if this class is resolved in this constant pool
   1.240      // so, the is_initialized flag should be suffiect.
   1.241 -    if (state_copy != NULL) {
   1.242 +    if (state_before != NULL) {
   1.243        // build a patching constant
   1.244 -      obj = new Constant(new ClassConstant(holder), state_copy);
   1.245 +      obj = new Constant(new ClassConstant(holder), state_before);
   1.246      } else {
   1.247        obj = new Constant(new ClassConstant(holder));
   1.248      }
   1.249 @@ -1499,25 +1488,32 @@
   1.250        }
   1.251        if (constant != NULL) {
   1.252          push(type, append(constant));
   1.253 -        state_copy = NULL; // Not a potential deoptimization point (see set_state_before logic below)
   1.254        } else {
   1.255 +        if (state_before == NULL) {
   1.256 +          state_before = copy_state_for_exception();
   1.257 +        }
   1.258          push(type, append(new LoadField(append(obj), offset, field, true,
   1.259 -                                        lock_stack(), state_copy, is_loaded, is_initialized)));
   1.260 +                                        state_before, is_loaded, is_initialized)));
   1.261        }
   1.262        break;
   1.263      }
   1.264      case Bytecodes::_putstatic:
   1.265        { Value val = pop(type);
   1.266 -        append(new StoreField(append(obj), offset, field, val, true, lock_stack(), state_copy, is_loaded, is_initialized));
   1.267 +        if (state_before == NULL) {
   1.268 +          state_before = copy_state_for_exception();
   1.269 +        }
   1.270 +        append(new StoreField(append(obj), offset, field, val, true, state_before, is_loaded, is_initialized));
   1.271        }
   1.272        break;
   1.273      case Bytecodes::_getfield :
   1.274        {
   1.275 -        LoadField* load = new LoadField(apop(), offset, field, false, lock_stack(), state_copy, is_loaded, true);
   1.276 +        if (state_before == NULL) {
   1.277 +          state_before = copy_state_for_exception();
   1.278 +        }
   1.279 +        LoadField* load = new LoadField(apop(), offset, field, false, state_before, is_loaded, true);
   1.280          Value replacement = is_loaded ? _memory->load(load) : load;
   1.281          if (replacement != load) {
   1.282 -          assert(replacement->bci() != -99 || replacement->as_Phi() || replacement->as_Local(),
   1.283 -                 "should already by linked");
    1.284 +          assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
   1.285            push(type, replacement);
   1.286          } else {
   1.287            push(type, append(load));
   1.288 @@ -1527,7 +1523,10 @@
   1.289  
   1.290      case Bytecodes::_putfield :
   1.291        { Value val = pop(type);
   1.292 -        StoreField* store = new StoreField(apop(), offset, field, val, false, lock_stack(), state_copy, is_loaded, true);
   1.293 +        if (state_before == NULL) {
   1.294 +          state_before = copy_state_for_exception();
   1.295 +        }
   1.296 +        StoreField* store = new StoreField(apop(), offset, field, val, false, state_before, is_loaded, true);
   1.297          if (is_loaded) store = _memory->store(store);
   1.298          if (store != NULL) {
   1.299            append(store);
   1.300 @@ -1647,7 +1646,7 @@
   1.301            actual_recv = target->holder();
   1.302  
   1.303            // insert a check it's really the expected class.
   1.304 -          CheckCast* c = new CheckCast(klass, receiver, NULL);
   1.305 +          CheckCast* c = new CheckCast(klass, receiver, copy_state_for_exception());
   1.306            c->set_incompatible_class_change_check();
   1.307            c->set_direct_compare(klass->is_final());
   1.308            append_split(c);
   1.309 @@ -1732,7 +1731,7 @@
   1.310  
   1.311    // We require the debug info to be the "state before" because
   1.312    // invokedynamics may deoptimize.
   1.313 -  ValueStack* state_before = is_invokedynamic ? state()->copy() : NULL;
   1.314 +  ValueStack* state_before = is_invokedynamic ? copy_state_before() : copy_state_exhandling();
   1.315  
   1.316    Values* args = state()->pop_arguments(target->arg_size_no_receiver());
   1.317    Value recv = has_receiver ? apop() : NULL;
   1.318 @@ -1795,24 +1794,26 @@
   1.319  
   1.320  
   1.321  void GraphBuilder::new_instance(int klass_index) {
   1.322 +  ValueStack* state_before = copy_state_exhandling();
   1.323    bool will_link;
   1.324    ciKlass* klass = stream()->get_klass(will_link);
   1.325    assert(klass->is_instance_klass(), "must be an instance klass");
   1.326 -  NewInstance* new_instance = new NewInstance(klass->as_instance_klass());
   1.327 +  NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before);
   1.328    _memory->new_instance(new_instance);
   1.329    apush(append_split(new_instance));
   1.330  }
   1.331  
   1.332  
   1.333  void GraphBuilder::new_type_array() {
   1.334 -  apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index())));
   1.335 +  ValueStack* state_before = copy_state_exhandling();
   1.336 +  apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
   1.337  }
   1.338  
   1.339  
   1.340  void GraphBuilder::new_object_array() {
   1.341    bool will_link;
   1.342    ciKlass* klass = stream()->get_klass(will_link);
   1.343 -  ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL;
   1.344 +  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
   1.345    NewArray* n = new NewObjectArray(klass, ipop(), state_before);
   1.346    apush(append_split(n));
   1.347  }
   1.348 @@ -1838,7 +1839,7 @@
   1.349  void GraphBuilder::check_cast(int klass_index) {
   1.350    bool will_link;
   1.351    ciKlass* klass = stream()->get_klass(will_link);
   1.352 -  ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL;
   1.353 +  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
   1.354    CheckCast* c = new CheckCast(klass, apop(), state_before);
   1.355    apush(append_split(c));
   1.356    c->set_direct_compare(direct_compare(klass));
   1.357 @@ -1859,7 +1860,7 @@
   1.358  void GraphBuilder::instance_of(int klass_index) {
   1.359    bool will_link;
   1.360    ciKlass* klass = stream()->get_klass(will_link);
   1.361 -  ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL;
   1.362 +  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
   1.363    InstanceOf* i = new InstanceOf(klass, apop(), state_before);
   1.364    ipush(append_split(i));
   1.365    i->set_direct_compare(direct_compare(klass));
   1.366 @@ -1879,25 +1880,13 @@
   1.367  
   1.368  void GraphBuilder::monitorenter(Value x, int bci) {
   1.369    // save state before locking in case of deoptimization after a NullPointerException
   1.370 -  ValueStack* lock_stack_before = lock_stack();
   1.371 -  append_with_bci(new MonitorEnter(x, state()->lock(scope(), x), lock_stack_before), bci);
   1.372 +  ValueStack* state_before = copy_state_for_exception_with_bci(bci);
   1.373 +  append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
   1.374    kill_all();
   1.375  }
   1.376  
   1.377  
   1.378  void GraphBuilder::monitorexit(Value x, int bci) {
   1.379 -  // Note: the comment below is only relevant for the case where we do
   1.380 -  // not deoptimize due to asynchronous exceptions (!(DeoptC1 &&
   1.381 -  // DeoptOnAsyncException), which is not used anymore)
   1.382 -
   1.383 -  // Note: Potentially, the monitor state in an exception handler
   1.384 -  //       can be wrong due to wrong 'initialization' of the handler
   1.385 -  //       via a wrong asynchronous exception path. This can happen,
   1.386 -  //       if the exception handler range for asynchronous exceptions
   1.387 -  //       is too long (see also java bug 4327029, and comment in
   1.388 -  //       GraphBuilder::handle_exception()). This may cause 'under-
   1.389 -  //       flow' of the monitor stack => bailout instead.
   1.390 -  if (state()->locks_size() < 1) BAILOUT("monitor stack underflow");
   1.391    append_with_bci(new MonitorExit(x, state()->unlock()), bci);
   1.392    kill_all();
   1.393  }
   1.394 @@ -1906,7 +1895,7 @@
   1.395  void GraphBuilder::new_multi_array(int dimensions) {
   1.396    bool will_link;
   1.397    ciKlass* klass = stream()->get_klass(will_link);
   1.398 -  ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL;
   1.399 +  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
   1.400  
   1.401    Values* dims = new Values(dimensions, NULL);
   1.402    // fill in all dimensions
   1.403 @@ -1921,8 +1910,10 @@
   1.404  void GraphBuilder::throw_op(int bci) {
   1.405    // We require that the debug info for a Throw be the "state before"
   1.406    // the Throw (i.e., exception oop is still on TOS)
   1.407 -  ValueStack* state_before = state()->copy();
   1.408 +  ValueStack* state_before = copy_state_before_with_bci(bci);
   1.409    Throw* t = new Throw(apop(), state_before);
   1.410 +  // operand stack not needed after a throw
   1.411 +  state()->truncate_stack(0);
   1.412    append_with_bci(t, bci);
   1.413  }
   1.414  
   1.415 @@ -1947,60 +1938,62 @@
   1.416  Instruction* GraphBuilder::append_with_bci(Instruction* instr, int bci) {
   1.417    Canonicalizer canon(compilation(), instr, bci);
   1.418    Instruction* i1 = canon.canonical();
   1.419 -  if (i1->bci() != -99) {
   1.420 +  if (i1->is_linked() || !i1->can_be_linked()) {
   1.421      // Canonicalizer returned an instruction which was already
   1.422      // appended so simply return it.
   1.423      return i1;
   1.424 -  } else if (UseLocalValueNumbering) {
   1.425 +  }
   1.426 +
   1.427 +  if (UseLocalValueNumbering) {
   1.428      // Lookup the instruction in the ValueMap and add it to the map if
   1.429      // it's not found.
   1.430      Instruction* i2 = vmap()->find_insert(i1);
   1.431      if (i2 != i1) {
   1.432        // found an entry in the value map, so just return it.
   1.433 -      assert(i2->bci() != -1, "should already be linked");
   1.434 +      assert(i2->is_linked(), "should already be linked");
   1.435        return i2;
   1.436      }
   1.437      ValueNumberingEffects vne(vmap());
   1.438      i1->visit(&vne);
   1.439    }
   1.440  
   1.441 -  if (i1->as_Phi() == NULL && i1->as_Local() == NULL) {
   1.442 -    // i1 was not eliminated => append it
   1.443 -    assert(i1->next() == NULL, "shouldn't already be linked");
   1.444 -    _last = _last->set_next(i1, canon.bci());
   1.445 -    if (++_instruction_count >= InstructionCountCutoff
   1.446 -        && !bailed_out()) {
   1.447 -      // set the bailout state but complete normal processing.  We
   1.448 -      // might do a little more work before noticing the bailout so we
   1.449 -      // want processing to continue normally until it's noticed.
   1.450 -      bailout("Method and/or inlining is too large");
   1.451 +  // i1 was not eliminated => append it
   1.452 +  assert(i1->next() == NULL, "shouldn't already be linked");
   1.453 +  _last = _last->set_next(i1, canon.bci());
   1.454 +
   1.455 +  if (++_instruction_count >= InstructionCountCutoff && !bailed_out()) {
   1.456 +    // set the bailout state but complete normal processing.  We
   1.457 +    // might do a little more work before noticing the bailout so we
   1.458 +    // want processing to continue normally until it's noticed.
   1.459 +    bailout("Method and/or inlining is too large");
   1.460 +  }
   1.461 +
   1.462 +#ifndef PRODUCT
   1.463 +  if (PrintIRDuringConstruction) {
   1.464 +    InstructionPrinter ip;
   1.465 +    ip.print_line(i1);
   1.466 +    if (Verbose) {
   1.467 +      state()->print();
   1.468      }
   1.469 -
   1.470 -#ifndef PRODUCT
   1.471 -    if (PrintIRDuringConstruction) {
   1.472 -      InstructionPrinter ip;
   1.473 -      ip.print_line(i1);
   1.474 -      if (Verbose) {
   1.475 -        state()->print();
   1.476 +  }
   1.477 +#endif
   1.478 +
   1.479 +  // save state after modification of operand stack for StateSplit instructions
   1.480 +  StateSplit* s = i1->as_StateSplit();
   1.481 +  if (s != NULL) {
   1.482 +    if (EliminateFieldAccess) {
   1.483 +      Intrinsic* intrinsic = s->as_Intrinsic();
   1.484 +      if (s->as_Invoke() != NULL || (intrinsic && !intrinsic->preserves_state())) {
   1.485 +        _memory->kill();
   1.486        }
   1.487      }
   1.488 -#endif
   1.489 -    assert(_last == i1, "adjust code below");
   1.490 -    StateSplit* s = i1->as_StateSplit();
   1.491 -    if (s != NULL && i1->as_BlockEnd() == NULL) {
   1.492 -      if (EliminateFieldAccess) {
   1.493 -        Intrinsic* intrinsic = s->as_Intrinsic();
   1.494 -        if (s->as_Invoke() != NULL || (intrinsic && !intrinsic->preserves_state())) {
   1.495 -          _memory->kill();
   1.496 -        }
   1.497 -      }
   1.498 -      s->set_state(state()->copy());
   1.499 -    }
   1.500 -    // set up exception handlers for this instruction if necessary
   1.501 -    if (i1->can_trap()) {
   1.502 -      assert(exception_state() != NULL || !has_handler(), "must have setup exception state");
   1.503 -      i1->set_exception_handlers(handle_exception(bci));
   1.504 -    }
   1.505 +    s->set_state(state()->copy(ValueStack::StateAfter, canon.bci()));
   1.506 +  }
   1.507 +
   1.508 +  // set up exception handlers for this instruction if necessary
   1.509 +  if (i1->can_trap()) {
   1.510 +    i1->set_exception_handlers(handle_exception(i1));
   1.511 +    assert(i1->exception_state() != NULL || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
   1.512    }
   1.513    return i1;
   1.514  }
   1.515 @@ -2032,26 +2025,30 @@
   1.516        }
   1.517      }
   1.518    }
   1.519 -  append(new NullCheck(value, lock_stack()));
   1.520 +  append(new NullCheck(value, copy_state_for_exception()));
   1.521  }
   1.522  
   1.523  
   1.524  
   1.525 -XHandlers* GraphBuilder::handle_exception(int cur_bci) {
   1.526 -  // fast path if it is guaranteed that no exception handlers are present
   1.527 -  if (!has_handler()) {
   1.528 -    // TODO: check if return NULL is possible (avoids empty lists)
   1.529 +XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
   1.530 +  if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != NULL)) {
   1.531 +    assert(instruction->exception_state() == NULL
   1.532 +           || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
   1.533 +           || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->jvmti_can_access_local_variables()),
   1.534 +           "exception_state should be of exception kind");
   1.535      return new XHandlers();
   1.536    }
   1.537  
   1.538    XHandlers*  exception_handlers = new XHandlers();
   1.539    ScopeData*  cur_scope_data = scope_data();
   1.540 -  ValueStack* s = exception_state();
   1.541 +  ValueStack* cur_state = instruction->state_before();
   1.542 +  ValueStack* prev_state = NULL;
   1.543    int scope_count = 0;
   1.544  
   1.545 -  assert(s != NULL, "exception state must be set");
   1.546 +  assert(cur_state != NULL, "state_before must be set");
   1.547    do {
   1.548 -    assert(cur_scope_data->scope() == s->scope(), "scopes do not match");
   1.549 +    int cur_bci = cur_state->bci();
   1.550 +    assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
   1.551      assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
   1.552  
   1.553      // join with all potential exception handlers
   1.554 @@ -2075,10 +2072,15 @@
   1.555  
   1.556          // previously this was a BAILOUT, but this is not necessary
   1.557          // now because asynchronous exceptions are not handled this way.
   1.558 -        assert(entry->state() == NULL || s->locks_size() == entry->state()->locks_size(), "locks do not match");
   1.559 +        assert(entry->state() == NULL || cur_state->total_locks_size() == entry->state()->total_locks_size(), "locks do not match");
   1.560  
   1.561          // xhandler start with an empty expression stack
   1.562 -        s->truncate_stack(cur_scope_data->caller_stack_size());
   1.563 +        if (cur_state->stack_size() != 0) {
   1.564 +          cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
   1.565 +        }
   1.566 +        if (instruction->exception_state() == NULL) {
   1.567 +          instruction->set_exception_state(cur_state);
   1.568 +        }
   1.569  
   1.570          // Note: Usually this join must work. However, very
   1.571          // complicated jsr-ret structures where we don't ret from
   1.572 @@ -2087,12 +2089,12 @@
   1.573          // The only test case we've seen so far which exhibits this
   1.574          // problem is caught by the infinite recursion test in
   1.575          // GraphBuilder::jsr() if the join doesn't work.
   1.576 -        if (!entry->try_merge(s)) {
   1.577 +        if (!entry->try_merge(cur_state)) {
   1.578            BAILOUT_("error while joining with exception handler, prob. due to complicated jsr/rets", exception_handlers);
   1.579          }
   1.580  
   1.581          // add current state for correct handling of phi functions at begin of xhandler
   1.582 -        int phi_operand = entry->add_exception_state(s);
   1.583 +        int phi_operand = entry->add_exception_state(cur_state);
   1.584  
   1.585          // add entry to the list of xhandlers of this block
   1.586          _block->add_exception_handler(entry);
   1.587 @@ -2119,26 +2121,39 @@
   1.588        }
   1.589      }
   1.590  
   1.591 +    if (exception_handlers->length() == 0) {
   1.592 +      // This scope and all callees do not handle exceptions, so the local
   1.593 +      // variables of this scope are not needed. However, the scope itself is
   1.594 +      // required for a correct exception stack trace -> clear out the locals.
   1.595 +      if (_compilation->env()->jvmti_can_access_local_variables()) {
   1.596 +        cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
   1.597 +      } else {
   1.598 +        cur_state = cur_state->copy(ValueStack::EmptyExceptionState, cur_state->bci());
   1.599 +      }
   1.600 +      if (prev_state != NULL) {
   1.601 +        prev_state->set_caller_state(cur_state);
   1.602 +      }
   1.603 +      if (instruction->exception_state() == NULL) {
   1.604 +        instruction->set_exception_state(cur_state);
   1.605 +      }
   1.606 +    }
   1.607 +
   1.608      // Set up iteration for next time.
   1.609      // If parsing a jsr, do not grab exception handlers from the
   1.610      // parent scopes for this method (already got them, and they
   1.611      // needed to be cloned)
   1.612 -    if (cur_scope_data->parsing_jsr()) {
   1.613 -      IRScope* tmp_scope = cur_scope_data->scope();
   1.614 -      while (cur_scope_data->parent() != NULL &&
   1.615 -             cur_scope_data->parent()->scope() == tmp_scope) {
   1.616 -        cur_scope_data = cur_scope_data->parent();
   1.617 -      }
   1.618 +
   1.619 +    while (cur_scope_data->parsing_jsr()) {
   1.620 +      cur_scope_data = cur_scope_data->parent();
   1.621      }
   1.622 -    if (cur_scope_data != NULL) {
   1.623 -      if (cur_scope_data->parent() != NULL) {
   1.624 -        // must use pop_scope instead of caller_state to preserve all monitors
   1.625 -        s = s->pop_scope();
   1.626 -      }
   1.627 -      cur_bci = cur_scope_data->scope()->caller_bci();
   1.628 -      cur_scope_data = cur_scope_data->parent();
   1.629 -      scope_count++;
   1.630 -    }
   1.631 +
   1.632 +    assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
   1.633 +    assert(cur_state->locks_size() == 0 || cur_state->locks_size() == 1, "unlocking must be done in a catchall exception handler");
   1.634 +
   1.635 +    prev_state = cur_state;
   1.636 +    cur_state = cur_state->caller_state();
   1.637 +    cur_scope_data = cur_scope_data->parent();
   1.638 +    scope_count++;
   1.639    } while (cur_scope_data != NULL);
   1.640  
   1.641    return exception_handlers;
   1.642 @@ -2243,14 +2258,10 @@
   1.643    );
   1.644  
   1.645    ValueStack* state = b->state()->caller_state();
   1.646 -  int index;
   1.647 -  Value value;
   1.648 -  for_each_state(state) {
   1.649 -    for_each_local_value(state, index, value) {
   1.650 -      Phi* phi = value->as_Phi();
   1.651 -      assert(phi == NULL || phi->block() != b, "must not have phi function to simplify in caller state");
   1.652 -    }
   1.653 -  }
   1.654 +  for_each_state_value(state, value,
   1.655 +    Phi* phi = value->as_Phi();
   1.656 +    assert(phi == NULL || phi->block() != b, "must not have phi function to simplify in caller state");
   1.657 +  );
   1.658  #endif
   1.659  }
   1.660  
   1.661 @@ -2265,7 +2276,7 @@
   1.662    // setup iteration
   1.663    kill_all();
   1.664    _block = beg;
   1.665 -  _state = beg->state()->copy();
   1.666 +  _state = beg->state()->copy_for_parsing();
   1.667    _last  = beg;
   1.668    iterate_bytecodes_for_block(beg->bci());
   1.669  }
   1.670 @@ -2301,14 +2312,7 @@
   1.671    while (!bailed_out() && last()->as_BlockEnd() == NULL &&
   1.672           (code = stream()->next()) != ciBytecodeStream::EOBC() &&
   1.673           (block_at(s.cur_bci()) == NULL || block_at(s.cur_bci()) == block())) {
   1.674 -
   1.675 -    if (has_handler() && can_trap(method(), code)) {
   1.676 -      // copy the state because it is modified before handle_exception is called
   1.677 -      set_exception_state(state()->copy());
   1.678 -    } else {
   1.679 -      // handle_exception is not called for this bytecode
   1.680 -      set_exception_state(NULL);
   1.681 -    }
   1.682 +    assert(state()->kind() == ValueStack::Parsing, "invalid state kind");
   1.683  
   1.684      // Check for active jsr during OSR compilation
   1.685      if (compilation()->is_osr_compile()
   1.686 @@ -2433,12 +2437,12 @@
   1.687        case Bytecodes::_lmul           : arithmetic_op(longType  , code); break;
   1.688        case Bytecodes::_fmul           : arithmetic_op(floatType , code); break;
   1.689        case Bytecodes::_dmul           : arithmetic_op(doubleType, code); break;
   1.690 -      case Bytecodes::_idiv           : arithmetic_op(intType   , code, lock_stack()); break;
   1.691 -      case Bytecodes::_ldiv           : arithmetic_op(longType  , code, lock_stack()); break;
   1.692 +      case Bytecodes::_idiv           : arithmetic_op(intType   , code, copy_state_for_exception()); break;
   1.693 +      case Bytecodes::_ldiv           : arithmetic_op(longType  , code, copy_state_for_exception()); break;
   1.694        case Bytecodes::_fdiv           : arithmetic_op(floatType , code); break;
   1.695        case Bytecodes::_ddiv           : arithmetic_op(doubleType, code); break;
   1.696 -      case Bytecodes::_irem           : arithmetic_op(intType   , code, lock_stack()); break;
   1.697 -      case Bytecodes::_lrem           : arithmetic_op(longType  , code, lock_stack()); break;
   1.698 +      case Bytecodes::_irem           : arithmetic_op(intType   , code, copy_state_for_exception()); break;
   1.699 +      case Bytecodes::_lrem           : arithmetic_op(longType  , code, copy_state_for_exception()); break;
   1.700        case Bytecodes::_frem           : arithmetic_op(floatType , code); break;
   1.701        case Bytecodes::_drem           : arithmetic_op(doubleType, code); break;
   1.702        case Bytecodes::_ineg           : negate_op(intType   ); break;
   1.703 @@ -2515,11 +2519,10 @@
   1.704        case Bytecodes::_new            : new_instance(s.get_index_u2()); break;
   1.705        case Bytecodes::_newarray       : new_type_array(); break;
   1.706        case Bytecodes::_anewarray      : new_object_array(); break;
   1.707 -      case Bytecodes::_arraylength    : ipush(append(new ArrayLength(apop(), lock_stack()))); break;
   1.708 +      case Bytecodes::_arraylength    : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
   1.709        case Bytecodes::_athrow         : throw_op(s.cur_bci()); break;
   1.710        case Bytecodes::_checkcast      : check_cast(s.get_index_u2()); break;
   1.711        case Bytecodes::_instanceof     : instance_of(s.get_index_u2()); break;
   1.712 -      // Note: we do not have special handling for the monitorenter bytecode if DeoptC1 && DeoptOnAsyncException
   1.713        case Bytecodes::_monitorenter   : monitorenter(apop(), s.cur_bci()); break;
   1.714        case Bytecodes::_monitorexit    : monitorexit (apop(), s.cur_bci()); break;
   1.715        case Bytecodes::_wide           : ShouldNotReachHere(); break;
   1.716 @@ -2546,28 +2549,22 @@
   1.717    if (end == NULL) {
   1.718      // all blocks must end with a BlockEnd instruction => add a Goto
   1.719      end = new Goto(block_at(s.cur_bci()), false);
   1.720 -    _last = _last->set_next(end, prev_bci);
   1.721 +    append(end);
   1.722    }
   1.723    assert(end == last()->as_BlockEnd(), "inconsistency");
   1.724  
   1.725 -  // if the method terminates, we don't need the stack anymore
   1.726 -  if (end->as_Return() != NULL) {
   1.727 -    state()->clear_stack();
   1.728 -  } else if (end->as_Throw() != NULL) {
   1.729 -    // May have exception handler in caller scopes
   1.730 -    state()->truncate_stack(scope()->lock_stack_size());
   1.731 -  }
   1.732 +  assert(end->state() != NULL, "state must already be present");
   1.733 +  assert(end->as_Return() == NULL || end->as_Throw() == NULL || end->state()->stack_size() == 0, "stack not needed for return and throw");
   1.734  
   1.735    // connect to begin & set state
   1.736    // NOTE that inlining may have changed the block we are parsing
   1.737    block()->set_end(end);
   1.738 -  end->set_state(state());
   1.739    // propagate state
   1.740    for (int i = end->number_of_sux() - 1; i >= 0; i--) {
   1.741      BlockBegin* sux = end->sux_at(i);
   1.742      assert(sux->is_predecessor(block()), "predecessor missing");
   1.743      // be careful, bailout if bytecodes are strange
   1.744 -    if (!sux->try_merge(state())) BAILOUT_("block join failed", NULL);
   1.745 +    if (!sux->try_merge(end->state())) BAILOUT_("block join failed", NULL);
   1.746      scope_data()->add_to_work_list(end->sux_at(i));
   1.747    }
   1.748  
   1.749 @@ -2605,7 +2602,6 @@
   1.750  
   1.751  
   1.752  bool GraphBuilder::_can_trap      [Bytecodes::number_of_java_codes];
   1.753 -bool GraphBuilder::_is_async[Bytecodes::number_of_java_codes];
   1.754  
   1.755  void GraphBuilder::initialize() {
   1.756    // the following bytecodes are assumed to potentially
   1.757 @@ -2657,67 +2653,14 @@
   1.758      , Bytecodes::_multianewarray
   1.759      };
   1.760  
   1.761 -  // the following bytecodes are assumed to potentially
   1.762 -  // throw asynchronous exceptions in compiled code due
   1.763 -  // to safepoints (note: these entries could be merged
   1.764 -  // with the can_trap_list - however, we need to know
   1.765 -  // which ones are asynchronous for now - see also the
   1.766 -  // comment in GraphBuilder::handle_exception)
   1.767 -  Bytecodes::Code is_async_list[] =
   1.768 -    { Bytecodes::_ifeq
   1.769 -    , Bytecodes::_ifne
   1.770 -    , Bytecodes::_iflt
   1.771 -    , Bytecodes::_ifge
   1.772 -    , Bytecodes::_ifgt
   1.773 -    , Bytecodes::_ifle
   1.774 -    , Bytecodes::_if_icmpeq
   1.775 -    , Bytecodes::_if_icmpne
   1.776 -    , Bytecodes::_if_icmplt
   1.777 -    , Bytecodes::_if_icmpge
   1.778 -    , Bytecodes::_if_icmpgt
   1.779 -    , Bytecodes::_if_icmple
   1.780 -    , Bytecodes::_if_acmpeq
   1.781 -    , Bytecodes::_if_acmpne
   1.782 -    , Bytecodes::_goto
   1.783 -    , Bytecodes::_jsr
   1.784 -    , Bytecodes::_ret
   1.785 -    , Bytecodes::_tableswitch
   1.786 -    , Bytecodes::_lookupswitch
   1.787 -    , Bytecodes::_ireturn
   1.788 -    , Bytecodes::_lreturn
   1.789 -    , Bytecodes::_freturn
   1.790 -    , Bytecodes::_dreturn
   1.791 -    , Bytecodes::_areturn
   1.792 -    , Bytecodes::_return
   1.793 -    , Bytecodes::_ifnull
   1.794 -    , Bytecodes::_ifnonnull
   1.795 -    , Bytecodes::_goto_w
   1.796 -    , Bytecodes::_jsr_w
   1.797 -    };
   1.798 -
    1.799    // initialize trap tables
   1.800    for (int i = 0; i < Bytecodes::number_of_java_codes; i++) {
   1.801      _can_trap[i] = false;
   1.802 -    _is_async[i] = false;
   1.803    }
   1.804    // set standard trap info
   1.805    for (uint j = 0; j < ARRAY_SIZE(can_trap_list); j++) {
   1.806      _can_trap[can_trap_list[j]] = true;
   1.807    }
   1.808 -
   1.809 -  // We now deoptimize if an asynchronous exception is thrown. This
   1.810 -  // considerably cleans up corner case issues related to javac's
   1.811 -  // incorrect exception handler ranges for async exceptions and
   1.812 -  // allows us to precisely analyze the types of exceptions from
   1.813 -  // certain bytecodes.
   1.814 -  if (!(DeoptC1 && DeoptOnAsyncException)) {
   1.815 -    // set asynchronous trap info
   1.816 -    for (uint k = 0; k < ARRAY_SIZE(is_async_list); k++) {
   1.817 -      assert(!_can_trap[is_async_list[k]], "can_trap_list and is_async_list should be disjoint");
   1.818 -      _can_trap[is_async_list[k]] = true;
   1.819 -      _is_async[is_async_list[k]] = true;
   1.820 -    }
   1.821 -  }
   1.822  }
   1.823  
   1.824  
   1.825 @@ -2733,7 +2676,7 @@
   1.826    h->set_end(g);
   1.827    h->set(f);
   1.828    // setup header block end state
   1.829 -  ValueStack* s = state->copy(); // can use copy since stack is empty (=> no phis)
   1.830 +  ValueStack* s = state->copy(ValueStack::StateAfter, entry->bci()); // can use copy since stack is empty (=> no phis)
   1.831    assert(s->stack_is_empty(), "must have empty stack at entry point");
   1.832    g->set_state(s);
   1.833    return h;
   1.834 @@ -2768,8 +2711,8 @@
   1.835    start->set_next(base, 0);
   1.836    start->set_end(base);
   1.837    // create & setup state for start block
   1.838 -  start->set_state(state->copy());
   1.839 -  base->set_state(state->copy());
   1.840 +  start->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));
   1.841 +  base->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));
   1.842  
   1.843    if (base->std_entry()->state() == NULL) {
   1.844      // setup states for header blocks
   1.845 @@ -2803,6 +2746,7 @@
   1.846    kill_all();
   1.847    _block = _osr_entry;
   1.848    _state = _osr_entry->state()->copy();
   1.849 +  assert(_state->bci() == osr_bci, "mismatch");
   1.850    _last  = _osr_entry;
   1.851    Value e = append(new OsrEntry());
   1.852    e->set_needs_null_check(false);
   1.853 @@ -2852,7 +2796,6 @@
   1.854    assert(state->caller_state() == NULL, "should be top scope");
   1.855    state->clear_locals();
   1.856    Goto* g = new Goto(target, false);
   1.857 -  g->set_state(_state->copy());
   1.858    append(g);
   1.859    _osr_entry->set_end(g);
   1.860    target->merge(_osr_entry->end()->state());
   1.861 @@ -2862,7 +2805,7 @@
   1.862  
   1.863  
   1.864  ValueStack* GraphBuilder::state_at_entry() {
   1.865 -  ValueStack* state = new ValueStack(scope(), method()->max_locals(), method()->max_stack());
   1.866 +  ValueStack* state = new ValueStack(scope(), NULL);
   1.867  
   1.868    // Set up locals for receiver
   1.869    int idx = 0;
   1.870 @@ -2886,7 +2829,7 @@
   1.871  
   1.872    // lock synchronized method
   1.873    if (method()->is_synchronized()) {
   1.874 -    state->lock(scope(), NULL);
   1.875 +    state->lock(NULL);
   1.876    }
   1.877  
   1.878    return state;
   1.879 @@ -2895,7 +2838,6 @@
   1.880  
   1.881  GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
   1.882    : _scope_data(NULL)
   1.883 -  , _exception_state(NULL)
   1.884    , _instruction_count(0)
   1.885    , _osr_entry(NULL)
   1.886    , _memory(new MemoryBuffer())
   1.887 @@ -2919,7 +2861,6 @@
   1.888  
   1.889    // complete graph
   1.890    _vmap        = new ValueMap();
   1.891 -  scope->compute_lock_stack_size();
   1.892    switch (scope->method()->intrinsic_id()) {
   1.893    case vmIntrinsics::_dabs          : // fall through
   1.894    case vmIntrinsics::_dsqrt         : // fall through
   1.895 @@ -2945,7 +2886,7 @@
   1.896  
   1.897        // setup the initial block state
   1.898        _block = start_block;
   1.899 -      _state = start_block->state()->copy();
   1.900 +      _state = start_block->state()->copy_for_parsing();
   1.901        _last  = start_block;
   1.902        load_local(doubleType, 0);
   1.903  
   1.904 @@ -2957,7 +2898,6 @@
   1.905        // connect the begin and end blocks and we're all done.
   1.906        BlockEnd* end = last()->as_BlockEnd();
   1.907        block()->set_end(end);
   1.908 -      end->set_state(state());
   1.909        break;
   1.910      }
   1.911    default:
   1.912 @@ -2988,13 +2928,38 @@
   1.913  }
   1.914  
   1.915  
   1.916 -ValueStack* GraphBuilder::lock_stack() {
   1.917 -  // return a new ValueStack representing just the current lock stack
   1.918 -  // (for debug info at safepoints in exception throwing or handling)
   1.919 -  ValueStack* new_stack = state()->copy_locks();
   1.920 -  return new_stack;
   1.921 +ValueStack* GraphBuilder::copy_state_before() {
   1.922 +  return copy_state_before_with_bci(bci());
   1.923  }
   1.924  
   1.925 +ValueStack* GraphBuilder::copy_state_exhandling() {
   1.926 +  return copy_state_exhandling_with_bci(bci());
   1.927 +}
   1.928 +
   1.929 +ValueStack* GraphBuilder::copy_state_for_exception() {
   1.930 +  return copy_state_for_exception_with_bci(bci());
   1.931 +}
   1.932 +
   1.933 +ValueStack* GraphBuilder::copy_state_before_with_bci(int bci) {
   1.934 +  return state()->copy(ValueStack::StateBefore, bci);
   1.935 +}
   1.936 +
   1.937 +ValueStack* GraphBuilder::copy_state_exhandling_with_bci(int bci) {
   1.938 +  if (!has_handler()) return NULL;
   1.939 +  return state()->copy(ValueStack::StateBefore, bci);
   1.940 +}
   1.941 +
   1.942 +ValueStack* GraphBuilder::copy_state_for_exception_with_bci(int bci) {
   1.943 +  ValueStack* s = copy_state_exhandling_with_bci(bci);
   1.944 +  if (s == NULL) {
   1.945 +    if (_compilation->env()->jvmti_can_access_local_variables()) {
   1.946 +      s = state()->copy(ValueStack::ExceptionState, bci);
   1.947 +    } else {
   1.948 +      s = state()->copy(ValueStack::EmptyExceptionState, bci);
   1.949 +    }
   1.950 +  }
   1.951 +  return s;
   1.952 +}
   1.953  
   1.954  int GraphBuilder::recursive_inline_level(ciMethod* cur_callee) const {
   1.955    int recur_level = 0;
   1.956 @@ -3177,9 +3142,9 @@
   1.957    // create intrinsic node
   1.958    const bool has_receiver = !callee->is_static();
   1.959    ValueType* result_type = as_ValueType(callee->return_type());
   1.960 +  ValueStack* state_before = copy_state_for_exception();
   1.961  
   1.962    Values* args = state()->pop_arguments(callee->arg_size());
   1.963 -  ValueStack* locks = lock_stack();
   1.964  
   1.965    if (is_profiling()) {
   1.966      // Don't profile in the special case where the root method
   1.967 @@ -3198,7 +3163,7 @@
   1.968      }
   1.969    }
   1.970  
   1.971 -  Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, lock_stack(),
   1.972 +  Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before,
   1.973                                      preserves_state, cantrap);
   1.974    // append instruction & push result
   1.975    Value value = append_split(result);
   1.976 @@ -3236,10 +3201,9 @@
   1.977    assert(jsr_start_block != NULL, "jsr start block must exist");
   1.978    assert(!jsr_start_block->is_set(BlockBegin::was_visited_flag), "should not have visited jsr yet");
   1.979    Goto* goto_sub = new Goto(jsr_start_block, false);
   1.980 -  goto_sub->set_state(state());
   1.981    // Must copy state to avoid wrong sharing when parsing bytecodes
   1.982    assert(jsr_start_block->state() == NULL, "should have fresh jsr starting block");
   1.983 -  jsr_start_block->set_state(state()->copy());
   1.984 +  jsr_start_block->set_state(copy_state_before_with_bci(jsr_dest_bci));
   1.985    append(goto_sub);
   1.986    _block->set_end(goto_sub);
   1.987    _last = _block = jsr_start_block;
   1.988 @@ -3290,7 +3254,6 @@
   1.989  void GraphBuilder::inline_sync_entry(Value lock, BlockBegin* sync_handler) {
   1.990    assert(lock != NULL && sync_handler != NULL, "lock or handler missing");
   1.991  
   1.992 -  set_exception_state(state()->copy());
   1.993    monitorenter(lock, SynchronizationEntryBCI);
   1.994    assert(_last->as_MonitorEnter() != NULL, "monitor enter expected");
   1.995    _last->set_needs_null_check(false);
   1.996 @@ -3332,7 +3295,7 @@
   1.997    int bci = SynchronizationEntryBCI;
   1.998    if (lock) {
   1.999      assert(state()->locks_size() > 0 && state()->lock_at(state()->locks_size() - 1) == lock, "lock is missing");
  1.1000 -    if (lock->bci() == -99) {
  1.1001 +    if (!lock->is_linked()) {
  1.1002        lock = append_with_bci(lock, -1);
  1.1003      }
  1.1004  
  1.1005 @@ -3342,21 +3305,17 @@
  1.1006      // exit the context of the synchronized method
  1.1007      if (!default_handler) {
  1.1008        pop_scope();
  1.1009 -      _state = _state->copy();
  1.1010 -      bci = _state->scope()->caller_bci();
  1.1011 -      _state = _state->pop_scope()->copy();
  1.1012 +      bci = _state->caller_state()->bci();
  1.1013 +      _state = _state->caller_state()->copy_for_parsing();
  1.1014      }
  1.1015    }
  1.1016  
   1.1017    // perform the throw as if at the call site
  1.1018    apush(exception);
  1.1019 -
  1.1020 -  set_exception_state(state()->copy());
  1.1021    throw_op(bci);
  1.1022  
  1.1023    BlockEnd* end = last()->as_BlockEnd();
  1.1024    block()->set_end(end);
  1.1025 -  end->set_state(state());
  1.1026  
  1.1027    _block = orig_block;
  1.1028    _state = orig_state;
  1.1029 @@ -3487,7 +3446,7 @@
  1.1030    // Pass parameters into callee state: add assignments
  1.1031    // note: this will also ensure that all arguments are computed before being passed
  1.1032    ValueStack* callee_state = state();
  1.1033 -  ValueStack* caller_state = scope()->caller_state();
  1.1034 +  ValueStack* caller_state = state()->caller_state();
  1.1035    { int i = args_base;
  1.1036      while (i < caller_state->stack_size()) {
  1.1037        const int par_no = i - args_base;
  1.1038 @@ -3502,16 +3461,7 @@
  1.1039    // Note that we preserve locals state in case we can use it later
  1.1040    // (see use of pop_scope() below)
  1.1041    caller_state->truncate_stack(args_base);
  1.1042 -  callee_state->truncate_stack(args_base);
  1.1043 -
  1.1044 -  // Setup state that is used at returns form the inlined method.
  1.1045 -  // This is essentially the state of the continuation block,
  1.1046 -  // but without the return value on stack, if any, this will
  1.1047 -  // be pushed at the return instruction (see method_return).
  1.1048 -  scope_data()->set_continuation_state(caller_state->copy());
  1.1049 -
  1.1050 -  // Compute lock stack size for callee scope now that args have been passed
  1.1051 -  scope()->compute_lock_stack_size();
  1.1052 +  assert(callee_state->stack_size() == 0, "callee stack must be empty");
  1.1053  
  1.1054    Value lock;
  1.1055    BlockBegin* sync_handler;
  1.1056 @@ -3520,11 +3470,8 @@
  1.1057    if (callee->is_synchronized()) {
  1.1058      lock = callee->is_static() ? append(new Constant(new InstanceConstant(callee->holder()->java_mirror())))
  1.1059                                 : state()->local_at(0);
  1.1060 -    sync_handler = new BlockBegin(-1);
  1.1061 +    sync_handler = new BlockBegin(SynchronizationEntryBCI);
  1.1062      inline_sync_entry(lock, sync_handler);
  1.1063 -
  1.1064 -    // recompute the lock stack size
  1.1065 -    scope()->compute_lock_stack_size();
  1.1066    }
  1.1067  
  1.1068  
  1.1069 @@ -3532,7 +3479,6 @@
  1.1070    if (callee_start_block != NULL) {
  1.1071      assert(callee_start_block->is_set(BlockBegin::parser_loop_header_flag), "must be loop header");
  1.1072      Goto* goto_callee = new Goto(callee_start_block, false);
  1.1073 -    goto_callee->set_state(state());
  1.1074      // The state for this goto is in the scope of the callee, so use
  1.1075      // the entry bci for the callee instead of the call site bci.
  1.1076      append_with_bci(goto_callee, 0);
  1.1077 @@ -3579,7 +3525,7 @@
  1.1078        && block() == orig_block
  1.1079        && block() == inline_cleanup_block()) {
  1.1080      _last = inline_cleanup_return_prev();
  1.1081 -    _state = inline_cleanup_state()->pop_scope();
  1.1082 +    _state = inline_cleanup_state();
  1.1083    } else if (continuation_preds == cont->number_of_preds()) {
  1.1084      // Inlining caused that the instructions after the invoke in the
  1.1085      // caller are not reachable any more. So skip filling this block
  1.1086 @@ -3645,8 +3591,7 @@
  1.1087      blb.bci2block()->at_put(0, NULL);
  1.1088    }
  1.1089  
  1.1090 -  callee_scope->set_caller_state(state());
  1.1091 -  set_state(state()->push_scope(callee_scope));
  1.1092 +  set_state(new ValueStack(callee_scope, state()->copy(ValueStack::CallerState, bci())));
  1.1093  
  1.1094    ScopeData* data = new ScopeData(scope_data());
  1.1095    data->set_scope(callee_scope);
  1.1096 @@ -3670,10 +3615,6 @@
  1.1097    data->set_scope(scope());
  1.1098    data->setup_jsr_xhandlers();
  1.1099    data->set_continuation(continuation());
  1.1100 -  if (continuation() != NULL) {
  1.1101 -    assert(continuation_state() != NULL, "");
  1.1102 -    data->set_continuation_state(continuation_state()->copy());
  1.1103 -  }
  1.1104    data->set_jsr_continuation(jsr_continuation);
  1.1105    _scope_data = data;
  1.1106  }
  1.1107 @@ -3768,6 +3709,7 @@
  1.1108  
  1.1109  
  1.1110  void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
  1.1111 +  ValueStack* state_before = copy_state_for_exception();
  1.1112    ValueType* result_type = as_ValueType(callee->return_type());
  1.1113    assert(result_type->is_int(), "int result");
  1.1114    Values* args = state()->pop_arguments(callee->arg_size());
  1.1115 @@ -3796,7 +3738,7 @@
  1.1116    // know which ones so mark the state as no preserved.  This will
  1.1117    // cause CSE to invalidate memory across it.
  1.1118    bool preserves_state = false;
  1.1119 -  Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(), args, false, lock_stack(), preserves_state);
  1.1120 +  Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(), args, false, state_before, preserves_state);
  1.1121    append_split(result);
  1.1122    push(result_type, result);
  1.1123    compilation()->set_has_unsafe_access(true);

mercurial