--- a/src/share/vm/opto/escape.cpp	Mon May 06 19:49:23 2013 -0700
+++ b/src/share/vm/opto/escape.cpp	Wed May 08 15:08:01 2013 -0700
@@ -63,15 +63,19 @@
   // EA brings benefits only when the code has allocations and/or locks which
   // are represented by ideal Macro nodes.
   int cnt = C->macro_count();
-  for( int i=0; i < cnt; i++ ) {
+  for (int i = 0; i < cnt; i++) {
     Node *n = C->macro_node(i);
-    if ( n->is_Allocate() )
+    if (n->is_Allocate())
       return true;
-    if( n->is_Lock() ) {
+    if (n->is_Lock()) {
       Node* obj = n->as_Lock()->obj_node()->uncast();
-      if( !(obj->is_Parm() || obj->is_Con()) )
+      if (!(obj->is_Parm() || obj->is_Con()))
         return true;
     }
+    if (n->is_CallStaticJava() &&
+        n->as_CallStaticJava()->is_boxing_method()) {
+      return true;
+    }
   }
   return false;
 }
@@ -115,7 +119,7 @@
   { Compile::TracePhase t3("connectionGraph", &Phase::_t_connectionGraph, true);

   // 1. Populate Connection Graph (CG) with PointsTo nodes.
-  ideal_nodes.map(C->unique(), NULL);  // preallocate space
+  ideal_nodes.map(C->live_nodes(), NULL);  // preallocate space
   // Initialize worklist
   if (C->root() != NULL) {
     ideal_nodes.push(C->root());
@@ -152,8 +156,11 @@
       // escape status of the associated Allocate node some of them
       // may be eliminated.
       storestore_worklist.append(n);
+    } else if (n->is_MemBar() && (n->Opcode() == Op_MemBarRelease) &&
+               (n->req() > MemBarNode::Precedent)) {
+      record_for_optimizer(n);
 #ifdef ASSERT
-    } else if(n->is_AddP()) {
+    } else if (n->is_AddP()) {
       // Collect address nodes for graph verification.
       addp_worklist.append(n);
 #endif
@@ -206,8 +213,15 @@
   int non_escaped_length = non_escaped_worklist.length();
   for (int next = 0; next < non_escaped_length; next++) {
     JavaObjectNode* ptn = non_escaped_worklist.at(next);
-    if (ptn->escape_state() == PointsToNode::NoEscape &&
-        ptn->scalar_replaceable()) {
+    bool noescape = (ptn->escape_state() == PointsToNode::NoEscape);
+    Node* n = ptn->ideal_node();
+    if (n->is_Allocate()) {
+      n->as_Allocate()->_is_non_escaping = noescape;
+    }
+    if (n->is_CallStaticJava()) {
+      n->as_CallStaticJava()->_is_non_escaping = noescape;
+    }
+    if (noescape && ptn->scalar_replaceable()) {
       adjust_scalar_replaceable_state(ptn);
       if (ptn->scalar_replaceable()) {
         alloc_worklist.append(ptn->ideal_node());
@@ -330,8 +344,10 @@
     // Don't mark as processed since call's arguments have to be processed.
     delayed_worklist->push(n);
     // Check if a call returns an object.
-    if (n->as_Call()->returns_pointer() &&
-        n->as_Call()->proj_out(TypeFunc::Parms) != NULL) {
+    if ((n->as_Call()->returns_pointer() &&
+         n->as_Call()->proj_out(TypeFunc::Parms) != NULL) ||
+        (n->is_CallStaticJava() &&
+         n->as_CallStaticJava()->is_boxing_method())) {
       add_call_node(n->as_Call());
     }
   }
@@ -387,8 +403,8 @@
     case Op_ConNKlass: {
       // assume all oop constants globally escape except for null
       PointsToNode::EscapeState es;
-      if (igvn->type(n) == TypePtr::NULL_PTR ||
-          igvn->type(n) == TypeNarrowOop::NULL_PTR) {
+      const Type* t = igvn->type(n);
+      if (t == TypePtr::NULL_PTR || t == TypeNarrowOop::NULL_PTR) {
         es = PointsToNode::NoEscape;
       } else {
         es = PointsToNode::GlobalEscape;
@@ -797,6 +813,9 @@
       // Returns a newly allocated unescaped object.
       add_java_object(call, PointsToNode::NoEscape);
       ptnode_adr(call_idx)->set_scalar_replaceable(false);
+    } else if (meth->is_boxing_method()) {
+      // Returns boxing object
+      add_java_object(call, PointsToNode::NoEscape);
     } else {
       BCEscapeAnalyzer* call_analyzer = meth->get_bcea();
       call_analyzer->copy_dependencies(_compile->dependencies());
@@ -943,6 +962,9 @@
       assert((name == NULL || strcmp(name, "uncommon_trap") != 0), "normal calls only");
 #endif
       ciMethod* meth = call->as_CallJava()->method();
+      if ((meth != NULL) && meth->is_boxing_method()) {
+        break; // Boxing methods do not modify any oops.
+      }
       BCEscapeAnalyzer* call_analyzer = (meth !=NULL) ? meth->get_bcea() : NULL;
       // fall-through if not a Java method or no analyzer information
       if (call_analyzer != NULL) {
@@ -2744,6 +2766,11 @@
         // so it could be eliminated if it has no uses.
         alloc->as_Allocate()->_is_scalar_replaceable = true;
       }
+      if (alloc->is_CallStaticJava()) {
+        // Set the scalar_replaceable flag for boxing method
+        // so it could be eliminated if it has no uses.
+        alloc->as_CallStaticJava()->_is_scalar_replaceable = true;
+      }
       continue;
     }
     if (!n->is_CheckCastPP()) { // not unique CheckCastPP.
@@ -2782,6 +2809,11 @@
         // so it could be eliminated.
         alloc->as_Allocate()->_is_scalar_replaceable = true;
       }
+      if (alloc->is_CallStaticJava()) {
+        // Set the scalar_replaceable flag for boxing method
+        // so it could be eliminated.
+        alloc->as_CallStaticJava()->_is_scalar_replaceable = true;
+      }
       set_escape_state(ptnode_adr(n->_idx), es); // CheckCastPP escape state
       // in order for an object to be scalar-replaceable, it must be:
       //   - a direct allocation (not a call returning an object)
@@ -2911,7 +2943,9 @@
           // Load/store to instance's field
           memnode_worklist.append_if_missing(use);
         } else if (use->is_MemBar()) {
-          memnode_worklist.append_if_missing(use);
+          if (use->in(TypeFunc::Memory) == n) { // Ignore precedent edge
+            memnode_worklist.append_if_missing(use);
+          }
         } else if (use->is_AddP() && use->outcnt() > 0) { // No dead nodes
           Node* addp2 = find_second_addp(use, n);
           if (addp2 != NULL) {
@@ -3028,7 +3062,9 @@
           continue;
         memnode_worklist.append_if_missing(use);
       } else if (use->is_MemBar()) {
-        memnode_worklist.append_if_missing(use);
+        if (use->in(TypeFunc::Memory) == n) { // Ignore precedent edge
+          memnode_worklist.append_if_missing(use);
+        }
 #ifdef ASSERT
       } else if(use->is_Mem()) {
         assert(use->in(MemNode::Memory) != n, "EA: missing memory path");
@@ -3264,7 +3300,12 @@
     if (ptn == NULL || !ptn->is_JavaObject())
       continue;
     PointsToNode::EscapeState es = ptn->escape_state();
-    if (ptn->ideal_node()->is_Allocate() && (es == PointsToNode::NoEscape || Verbose)) {
+    if ((es != PointsToNode::NoEscape) && !Verbose) {
+      continue;
+    }
+    Node* n = ptn->ideal_node();
+    if (n->is_Allocate() || (n->is_CallStaticJava() &&
+        n->as_CallStaticJava()->is_boxing_method())) {
       if (first) {
         tty->cr();
         tty->print("======== Connection graph for ");