85 |
85 |
86 extern void print_alias_types(); |
86 extern void print_alias_types(); |
87 |
87 |
88 #endif |
88 #endif |
89 |
89 |
|
90 static Node *step_through_mergemem(PhaseGVN *phase, MergeMemNode *mmem, const TypePtr *tp, const TypePtr *adr_check, outputStream *st) { |
|
91 uint alias_idx = phase->C->get_alias_index(tp); |
|
92 Node *mem = mmem; |
|
93 #ifdef ASSERT |
|
94 { |
|
95 // Check that current type is consistent with the alias index used during graph construction |
|
96 assert(alias_idx >= Compile::AliasIdxRaw, "must not be a bad alias_idx"); |
|
97 bool consistent = adr_check == NULL || adr_check->empty() || |
|
98 phase->C->must_alias(adr_check, alias_idx ); |
|
99 // Sometimes dead array references collapse to a[-1], a[-2], or a[-3] |
|
100 if( !consistent && adr_check != NULL && !adr_check->empty() && |
|
101 tp->isa_aryptr() && tp->offset() == Type::OffsetBot && |
|
102 adr_check->isa_aryptr() && adr_check->offset() != Type::OffsetBot && |
|
103 ( adr_check->offset() == arrayOopDesc::length_offset_in_bytes() || |
|
104 adr_check->offset() == oopDesc::klass_offset_in_bytes() || |
|
105 adr_check->offset() == oopDesc::mark_offset_in_bytes() ) ) { |
|
106 // don't assert if it is dead code. |
|
107 consistent = true; |
|
108 } |
|
109 if( !consistent ) { |
|
110 st->print("alias_idx==%d, adr_check==", alias_idx); |
|
111 if( adr_check == NULL ) { |
|
112 st->print("NULL"); |
|
113 } else { |
|
114 adr_check->dump(); |
|
115 } |
|
116 st->cr(); |
|
117 print_alias_types(); |
|
118 assert(consistent, "adr_check must match alias idx"); |
|
119 } |
|
120 } |
|
121 #endif |
|
122 // TypeInstPtr::NOTNULL+any is an OOP with unknown offset - generally |
|
123 // means an array I have not precisely typed yet. Do not do any |
|
124 // alias stuff with it any time soon. |
|
125 const TypeOopPtr *tinst = tp->isa_oopptr(); |
|
126 if( tp->base() != Type::AnyPtr && |
|
127 !(tinst && |
|
128 tinst->klass()->is_java_lang_Object() && |
|
129 tinst->offset() == Type::OffsetBot) ) { |
|
130 // compress paths and change unreachable cycles to TOP |
|
131 // If not, we can update the input infinitely along a MergeMem cycle |
|
132 // Equivalent code in PhiNode::Ideal |
|
133 Node* m = phase->transform(mmem); |
|
134 // If tranformed to a MergeMem, get the desired slice |
|
135 // Otherwise the returned node represents memory for every slice |
|
136 mem = (m->is_MergeMem())? m->as_MergeMem()->memory_at(alias_idx) : m; |
|
137 // Update input if it is progress over what we have now |
|
138 } |
|
139 return mem; |
|
140 } |
|
141 |
90 //--------------------------Ideal_common--------------------------------------- |
142 //--------------------------Ideal_common--------------------------------------- |
91 // Look for degenerate control and memory inputs. Bypass MergeMem inputs. |
143 // Look for degenerate control and memory inputs. Bypass MergeMem inputs. |
92 // Unhook non-raw memories from complete (macro-expanded) initializations. |
144 // Unhook non-raw memories from complete (macro-expanded) initializations. |
93 Node *MemNode::Ideal_common(PhaseGVN *phase, bool can_reshape) { |
145 Node *MemNode::Ideal_common(PhaseGVN *phase, bool can_reshape) { |
94 // If our control input is a dead region, kill all below the region |
146 // If our control input is a dead region, kill all below the region |
117 // and find_previous_store(). |
169 // and find_previous_store(). |
118 |
170 |
119 if (mem->is_MergeMem()) { |
171 if (mem->is_MergeMem()) { |
120 MergeMemNode* mmem = mem->as_MergeMem(); |
172 MergeMemNode* mmem = mem->as_MergeMem(); |
121 const TypePtr *tp = t_adr->is_ptr(); |
173 const TypePtr *tp = t_adr->is_ptr(); |
122 uint alias_idx = phase->C->get_alias_index(tp); |
174 |
123 #ifdef ASSERT |
175 mem = step_through_mergemem(phase, mmem, tp, adr_type(), tty); |
124 { |
|
125 // Check that current type is consistent with the alias index used during graph construction |
|
126 assert(alias_idx >= Compile::AliasIdxRaw, "must not be a bad alias_idx"); |
|
127 const TypePtr *adr_t = adr_type(); |
|
128 bool consistent = adr_t == NULL || adr_t->empty() || phase->C->must_alias(adr_t, alias_idx ); |
|
129 // Sometimes dead array references collapse to a[-1], a[-2], or a[-3] |
|
130 if( !consistent && adr_t != NULL && !adr_t->empty() && |
|
131 tp->isa_aryptr() && tp->offset() == Type::OffsetBot && |
|
132 adr_t->isa_aryptr() && adr_t->offset() != Type::OffsetBot && |
|
133 ( adr_t->offset() == arrayOopDesc::length_offset_in_bytes() || |
|
134 adr_t->offset() == oopDesc::klass_offset_in_bytes() || |
|
135 adr_t->offset() == oopDesc::mark_offset_in_bytes() ) ) { |
|
136 // don't assert if it is dead code. |
|
137 consistent = true; |
|
138 } |
|
139 if( !consistent ) { |
|
140 tty->print("alias_idx==%d, adr_type()==", alias_idx); if( adr_t == NULL ) { tty->print("NULL"); } else { adr_t->dump(); } |
|
141 tty->cr(); |
|
142 print_alias_types(); |
|
143 assert(consistent, "adr_type must match alias idx"); |
|
144 } |
|
145 } |
|
146 #endif |
|
147 // TypeInstPtr::NOTNULL+any is an OOP with unknown offset - generally |
|
148 // means an array I have not precisely typed yet. Do not do any |
|
149 // alias stuff with it any time soon. |
|
150 const TypeInstPtr *tinst = tp->isa_instptr(); |
|
151 if( tp->base() != Type::AnyPtr && |
|
152 !(tinst && |
|
153 tinst->klass()->is_java_lang_Object() && |
|
154 tinst->offset() == Type::OffsetBot) ) { |
|
155 // compress paths and change unreachable cycles to TOP |
|
156 // If not, we can update the input infinitely along a MergeMem cycle |
|
157 // Equivalent code in PhiNode::Ideal |
|
158 Node* m = phase->transform(mmem); |
|
159       // If transformed to a MergeMem, get the desired slice |
|
160 // Otherwise the returned node represents memory for every slice |
|
161 mem = (m->is_MergeMem())? m->as_MergeMem()->memory_at(alias_idx) : m; |
|
162 // Update input if it is progress over what we have now |
|
163 } |
|
164 } |
176 } |
165 |
177 |
166 if (mem != old_mem) { |
178 if (mem != old_mem) { |
167 set_req(MemNode::Memory, mem); |
179 set_req(MemNode::Memory, mem); |
168 return this; |
180 return this; |
532 { |
544 { |
533 assert(adr->as_Proj()->_con == TypeFunc::Parms, "must be return value"); |
545 assert(adr->as_Proj()->_con == TypeFunc::Parms, "must be return value"); |
534 const Node* call = adr->in(0); |
546 const Node* call = adr->in(0); |
535 if (call->is_CallStaticJava()) { |
547 if (call->is_CallStaticJava()) { |
536 const CallStaticJavaNode* call_java = call->as_CallStaticJava(); |
548 const CallStaticJavaNode* call_java = call->as_CallStaticJava(); |
537 assert(call_java && call_java->method() == NULL, "must be runtime call"); |
549 const TypeTuple *r = call_java->tf()->range(); |
|
550 assert(r->cnt() > TypeFunc::Parms, "must return value"); |
|
551 const Type* ret_type = r->field_at(TypeFunc::Parms); |
|
552 assert(ret_type && ret_type->isa_ptr(), "must return pointer"); |
538 // We further presume that this is one of |
553 // We further presume that this is one of |
539 // new_instance_Java, new_array_Java, or |
554 // new_instance_Java, new_array_Java, or |
540 // the like, but do not assert for this. |
555 // the like, but do not assert for this. |
541 } else if (call->is_Allocate()) { |
556 } else if (call->is_Allocate()) { |
542 // similar case to new_instance_Java, etc. |
557 // similar case to new_instance_Java, etc. |
730 } |
745 } |
731 |
746 |
732 return NULL; |
747 return NULL; |
733 } |
748 } |
734 |
749 |
|
750 //----------------------is_instance_field_load_with_local_phi------------------ |
|
751 bool LoadNode::is_instance_field_load_with_local_phi(Node* ctrl) { |
|
752 if( in(MemNode::Memory)->is_Phi() && in(MemNode::Memory)->in(0) == ctrl && |
|
753 in(MemNode::Address)->is_AddP() ) { |
|
754 const TypeOopPtr* t_oop = in(MemNode::Address)->bottom_type()->isa_oopptr(); |
|
755 // Only instances. |
|
756 if( t_oop != NULL && t_oop->is_instance_field() && |
|
757 t_oop->offset() != Type::OffsetBot && |
|
758 t_oop->offset() != Type::OffsetTop) { |
|
759 return true; |
|
760 } |
|
761 } |
|
762 return false; |
|
763 } |
|
764 |
735 //------------------------------Identity--------------------------------------- |
765 //------------------------------Identity--------------------------------------- |
736 // Loads are identity if previous store is to same address |
766 // Loads are identity if previous store is to same address |
737 Node *LoadNode::Identity( PhaseTransform *phase ) { |
767 Node *LoadNode::Identity( PhaseTransform *phase ) { |
738 // If the previous store-maker is the right kind of Store, and the store is |
768 // If the previous store-maker is the right kind of Store, and the store is |
739 // to the same address, then we are equal to the value stored. |
769 // to the same address, then we are equal to the value stored. |
752 } |
782 } |
753 // (This works even when value is a Con, but LoadNode::Value |
783 // (This works even when value is a Con, but LoadNode::Value |
754 // usually runs first, producing the singleton type of the Con.) |
784 // usually runs first, producing the singleton type of the Con.) |
755 return value; |
785 return value; |
756 } |
786 } |
|
787 |
|
788 // Search for an existing data phi which was generated before for the same |
|
789   // instance's field to avoid infinite generation of phis in a loop. |
|
790 Node *region = mem->in(0); |
|
791 if (is_instance_field_load_with_local_phi(region)) { |
|
792 const TypePtr *addr_t = in(MemNode::Address)->bottom_type()->isa_ptr(); |
|
793 int this_index = phase->C->get_alias_index(addr_t); |
|
794 int this_offset = addr_t->offset(); |
|
795 int this_id = addr_t->is_oopptr()->instance_id(); |
|
796 const Type* this_type = bottom_type(); |
|
797 for (DUIterator_Fast imax, i = region->fast_outs(imax); i < imax; i++) { |
|
798 Node* phi = region->fast_out(i); |
|
799 if (phi->is_Phi() && phi != mem && |
|
800 phi->as_Phi()->is_same_inst_field(this_type, this_id, this_index, this_offset)) { |
|
801 return phi; |
|
802 } |
|
803 } |
|
804 } |
|
805 |
757 return this; |
806 return this; |
758 } |
807 } |
759 |
808 |
760 |
809 |
761 // Returns true if the AliasType refers to the field that holds the |
810 // Returns true if the AliasType refers to the field that holds the |
1187 Node* value = can_see_stored_value(mem,phase); |
1236 Node* value = can_see_stored_value(mem,phase); |
1188 if (value != NULL && value->is_Con()) |
1237 if (value != NULL && value->is_Con()) |
1189 return value->bottom_type(); |
1238 return value->bottom_type(); |
1190 } |
1239 } |
1191 |
1240 |
|
1241 const TypeOopPtr *tinst = tp->isa_oopptr(); |
|
1242 if (tinst != NULL && tinst->is_instance_field()) { |
|
1243 // If we have an instance type and our memory input is the |
|
1244 // programs's initial memory state, there is no matching store, |
|
1245 // so just return a zero of the appropriate type |
|
1246 Node *mem = in(MemNode::Memory); |
|
1247 if (mem->is_Parm() && mem->in(0)->is_Start()) { |
|
1248 assert(mem->as_Parm()->_con == TypeFunc::Memory, "must be memory Parm"); |
|
1249 return Type::get_zero_type(_type->basic_type()); |
|
1250 } |
|
1251 } |
1192 return _type; |
1252 return _type; |
1193 } |
1253 } |
1194 |
1254 |
1195 //------------------------------match_edge------------------------------------- |
1255 //------------------------------match_edge------------------------------------- |
1196 // Do we Match on this edge index or not? Match only the address. |
1256 // Do we Match on this edge index or not? Match only the address. |
1710 bool StoreNode::value_never_loaded( PhaseTransform *phase) const { |
1770 bool StoreNode::value_never_loaded( PhaseTransform *phase) const { |
1711 Node *adr = in(Address); |
1771 Node *adr = in(Address); |
1712 const TypeOopPtr *adr_oop = phase->type(adr)->isa_oopptr(); |
1772 const TypeOopPtr *adr_oop = phase->type(adr)->isa_oopptr(); |
1713 if (adr_oop == NULL) |
1773 if (adr_oop == NULL) |
1714 return false; |
1774 return false; |
1715 if (!adr_oop->is_instance()) |
1775 if (!adr_oop->is_instance_field()) |
1716 return false; // if not a distinct instance, there may be aliases of the address |
1776 return false; // if not a distinct instance, there may be aliases of the address |
1717 for (DUIterator_Fast imax, i = adr->fast_outs(imax); i < imax; i++) { |
1777 for (DUIterator_Fast imax, i = adr->fast_outs(imax); i < imax; i++) { |
1718 Node *use = adr->fast_out(i); |
1778 Node *use = adr->fast_out(i); |
1719 int opc = use->Opcode(); |
1779 int opc = use->Opcode(); |
1720 if (use->is_Load() || use->is_LoadStore()) { |
1780 if (use->is_Load() || use->is_LoadStore()) { |