src/share/vm/opto/escape.cpp

changeset 1063
7bb995fbd3c0
parent 1040
98cb887364d3
child 1219
b2934faac289
     1.1 --- a/src/share/vm/opto/escape.cpp	Mon Mar 09 13:34:00 2009 -0700
     1.2 +++ b/src/share/vm/opto/escape.cpp	Thu Mar 12 18:16:36 2009 -0700
     1.3 @@ -515,7 +515,7 @@
     1.4    // cause the failure in add_offset() with narrow oops since TypeOopPtr()
     1.5    // constructor verifies correctness of the offset.
     1.6    //
     1.7 -  // It could happend on subclass's branch (from the type profiling
     1.8 +  // It could happen on subclass's branch (from the type profiling
     1.9    // inlining) which was not eliminated during parsing since the exactness
    1.10    // of the allocation type was not propagated to the subclass type check.
    1.11    //
    1.12 @@ -703,7 +703,7 @@
    1.13    while (prev != result) {
    1.14      prev = result;
    1.15      if (result == start_mem)
    1.16 -      break;  // hit one of our sentinals
    1.17 +      break;  // hit one of our sentinels
    1.18      if (result->is_Mem()) {
    1.19        const Type *at = phase->type(result->in(MemNode::Address));
    1.20        if (at != Type::TOP) {
    1.21 @@ -720,7 +720,7 @@
    1.22      if (result->is_Proj() && result->as_Proj()->_con == TypeFunc::Memory) {
    1.23        Node *proj_in = result->in(0);
    1.24        if (proj_in->is_Allocate() && proj_in->_idx == (uint)tinst->instance_id()) {
    1.25 -        break;  // hit one of our sentinals
    1.26 +        break;  // hit one of our sentinels
    1.27        } else if (proj_in->is_Call()) {
    1.28          CallNode *call = proj_in->as_Call();
    1.29          if (!call->may_modify(tinst, phase)) {
    1.30 @@ -756,6 +756,16 @@
    1.31        } else {
    1.32          break;
    1.33        }
    1.34 +    } else if (result->Opcode() == Op_SCMemProj) {
    1.35 +      assert(result->in(0)->is_LoadStore(), "sanity");
    1.36 +      const Type *at = phase->type(result->in(0)->in(MemNode::Address));
    1.37 +      if (at != Type::TOP) {
    1.38 +        assert (at->isa_ptr() != NULL, "pointer type required.");
    1.39 +        int idx = C->get_alias_index(at->is_ptr());
     1.40 +        assert(idx != alias_idx, "Object is not scalar replaceable if a LoadStore node accesses its field");
    1.41 +        break;
    1.42 +      }
    1.43 +      result = result->in(0)->in(MemNode::Memory);
    1.44      }
    1.45    }
    1.46    if (result->is_Phi()) {
    1.47 @@ -794,7 +804,7 @@
    1.48  //  Phase 2:  Process MemNode's from memnode_worklist. compute new address type and
    1.49  //            search the Memory chain for a store with the appropriate type
    1.50  //            address type.  If a Phi is found, create a new version with
    1.51 -//            the approriate memory slices from each of the Phi inputs.
    1.52 +//            the appropriate memory slices from each of the Phi inputs.
    1.53  //            For stores, process the users as follows:
    1.54  //               MemNode:  push on memnode_worklist
    1.55  //               MergeMem: push on mergemem_worklist
    1.56 @@ -1548,7 +1558,7 @@
    1.57        has_non_escaping_obj = true; // Non GlobalEscape
    1.58      Node* n = ptn->_node;
    1.59      if (n->is_Allocate() && ptn->_scalar_replaceable ) {
    1.60 -      // Push scalar replaceable alocations on alloc_worklist
    1.61 +      // Push scalar replaceable allocations on alloc_worklist
    1.62        // for processing in split_unique_types().
    1.63        alloc_worklist.append(n);
    1.64      }

mercurial