src/share/vm/opto/memnode.cpp

changeset 2816:286c498ae0d4
parent    2810:66b0e2371912
parent    2784:92add02409c9
child     3047:f1c12354c3f7
@@ -2153,13 +2153,16 @@
   if (p)  return (p == NodeSentinel) ? NULL : p;
 
   Node* mem     = in(MemNode::Memory);
   Node* address = in(MemNode::Address);
 
-  // Back-to-back stores to same address? Fold em up.
-  // Generally unsafe if I have intervening uses...
-  if (mem->is_Store() && phase->eqv_uncast(mem->in(MemNode::Address), address)) {
+  // Back-to-back stores to same address? Fold em up. Generally
+  // unsafe if I have intervening uses... Also disallowed for StoreCM
+  // since they must follow each StoreP operation. Redundant StoreCMs
+  // are eliminated just before matching in final_graph_reshape.
+  if (mem->is_Store() && phase->eqv_uncast(mem->in(MemNode::Address), address) &&
+      mem->Opcode() != Op_StoreCM) {
     // Looking at a dead closed cycle of memory?
     assert(mem != mem->in(MemNode::Memory), "dead loop in StoreNode::Ideal");
 
     assert(Opcode() == mem->Opcode() ||
            phase->C->get_alias_index(adr_type()) == Compile::AliasIdxRaw,
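The new comment states the rule this hunk enforces: a later store may absorb an earlier store to the same address, but not when the earlier store is a StoreCM card-mark store, which must stay paired with the StoreP it follows; redundant StoreCMs are instead removed just before matching in final_graph_reshape. The following is a minimal, self-contained C++ sketch of that folding rule on a toy store representation. The names here (StoreKind, StoreOp, can_fold_prior_store) are hypothetical and are not HotSpot's API.

// Toy illustration of the back-to-back store folding rule, with a
// card-mark-style store excluded the way the diff excludes StoreCM.
#include <cstdio>

enum class StoreKind { Plain, CardMark };   // CardMark stands in for StoreCM

struct StoreOp {
  StoreKind kind;
  int       address;       // toy address identity
  StoreOp*  prior_memory;  // previous store on the memory chain, or nullptr
};

// Decide whether 'later' may fold away the store directly below it on the
// memory chain: same address, and the earlier store is not a card mark.
static bool can_fold_prior_store(const StoreOp* later) {
  const StoreOp* prior = later->prior_memory;
  if (prior == nullptr)                    return false;
  if (prior->address != later->address)    return false;  // different slots
  if (prior->kind == StoreKind::CardMark)  return false;  // must follow its StoreP
  return true;
}

int main() {
  StoreOp card  {StoreKind::CardMark, 42, nullptr};
  StoreOp plain {StoreKind::Plain,    42, &card};
  StoreOp again {StoreKind::Plain,    42, &plain};

  std::printf("fold over card mark:   %s\n",
              can_fold_prior_store(&plain) ? "yes" : "no");   // no
  std::printf("fold over plain store: %s\n",
              can_fold_prior_store(&again) ? "yes" : "no");   // yes
  return 0;
}

In the sketch the disallowed case is simply skipped; in the actual change the cleanup of redundant StoreCMs is deferred to final_graph_reshape rather than done in StoreNode::Ideal.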
