src/share/vm/opto/matcher.cpp

changeset:  6457:94c202aa2646
parent:     6443:f4f6ae481e1a
parent:     5437:fcf521c3fbc6
child:      6462:e2722a66aba7
diff shown: 6456:c9f0adfb4a8b (equal/deleted/inserted/replaced) vs 6457:94c202aa2646
2306 2306
2307 // Used by the DFA in dfa_xxx.cpp. Check for a following barrier or 2307 // Used by the DFA in dfa_xxx.cpp. Check for a following barrier or
2308 // atomic instruction acting as a store_load barrier without any 2308 // atomic instruction acting as a store_load barrier without any
2309 // intervening volatile load, and thus we don't need a barrier here. 2309 // intervening volatile load, and thus we don't need a barrier here.
2310 // We retain the Node to act as a compiler ordering barrier. 2310 // We retain the Node to act as a compiler ordering barrier.
2311 bool Matcher::post_store_load_barrier(const Node *vmb) { 2311 bool Matcher::post_store_load_barrier(const Node* vmb) {
2312 Compile *C = Compile::current(); 2312 Compile* C = Compile::current();
2313 assert( vmb->is_MemBar(), "" ); 2313 assert(vmb->is_MemBar(), "");
2314 assert( vmb->Opcode() != Op_MemBarAcquire, "" ); 2314 assert(vmb->Opcode() != Op_MemBarAcquire, "");
2315 const MemBarNode *mem = (const MemBarNode*)vmb; 2315 const MemBarNode* membar = vmb->as_MemBar();
2316 2316
2317 // Get the Proj node, ctrl, that can be used to iterate forward 2317 // Get the Ideal Proj node, ctrl, that can be used to iterate forward
2318 Node *ctrl = NULL; 2318 Node* ctrl = NULL;
2319 DUIterator_Fast imax, i = mem->fast_outs(imax); 2319 for (DUIterator_Fast imax, i = membar->fast_outs(imax); i < imax; i++) {
2320 while( true ) { 2320 Node* p = membar->fast_out(i);
2321 ctrl = mem->fast_out(i); // Throw out-of-bounds if proj not found 2321 assert(p->is_Proj(), "only projections here");
2322 assert( ctrl->is_Proj(), "only projections here" ); 2322 if ((p->as_Proj()->_con == TypeFunc::Control) &&
2323 ProjNode *proj = (ProjNode*)ctrl; 2323 !C->node_arena()->contains(p)) { // Unmatched old-space only
2324 if( proj->_con == TypeFunc::Control && 2324 ctrl = p;
2325 !C->node_arena()->contains(ctrl) ) // Unmatched old-space only
2326 break; 2325 break;
2327 i++; 2326 }
2328 } 2327 }
2329 2328 assert((ctrl != NULL), "missing control projection");
2330 for( DUIterator_Fast jmax, j = ctrl->fast_outs(jmax); j < jmax; j++ ) { 2329
2330 for (DUIterator_Fast jmax, j = ctrl->fast_outs(jmax); j < jmax; j++) {
2331 Node *x = ctrl->fast_out(j); 2331 Node *x = ctrl->fast_out(j);
2332 int xop = x->Opcode(); 2332 int xop = x->Opcode();
2333 2333
2334 // We don't need current barrier if we see another or a lock 2334 // We don't need current barrier if we see another or a lock
2335 // before seeing volatile load. 2335 // before seeing volatile load.
2337 // Op_Fastunlock previously appeared in the Op_* list below. 2337 // Op_Fastunlock previously appeared in the Op_* list below.
2338 // With the advent of 1-0 lock operations we're no longer guaranteed 2338 // With the advent of 1-0 lock operations we're no longer guaranteed
2339 // that a monitor exit operation contains a serializing instruction. 2339 // that a monitor exit operation contains a serializing instruction.
2340 2340
2341 if (xop == Op_MemBarVolatile || 2341 if (xop == Op_MemBarVolatile ||
2342 xop == Op_FastLock ||
2343 xop == Op_CompareAndSwapL || 2342 xop == Op_CompareAndSwapL ||
2344 xop == Op_CompareAndSwapP || 2343 xop == Op_CompareAndSwapP ||
2345 xop == Op_CompareAndSwapN || 2344 xop == Op_CompareAndSwapN ||
2346 xop == Op_CompareAndSwapI) 2345 xop == Op_CompareAndSwapI) {
2347 return true; 2346 return true;
2347 }
2348
2349 // Op_FastLock previously appeared in the Op_* list above.
2350 // With biased locking we're no longer guaranteed that a monitor
2351 // enter operation contains a serializing instruction.
2352 if ((xop == Op_FastLock) && !UseBiasedLocking) {
2353 return true;
2354 }
2348 2355
2349 if (x->is_MemBar()) { 2356 if (x->is_MemBar()) {
2350 // We must retain this membar if there is an upcoming volatile 2357 // We must retain this membar if there is an upcoming volatile
2351 // load, which will be preceded by acquire membar. 2358 // load, which will be followed by acquire membar.
2352 if (xop == Op_MemBarAcquire) 2359 if (xop == Op_MemBarAcquire) {
2353 return false; 2360 return false;
2354 // For other kinds of barriers, check by pretending we 2361 } else {
2355 // are them, and seeing if we can be removed. 2362 // For other kinds of barriers, check by pretending we
2356 else 2363 // are them, and seeing if we can be removed.
2357 return post_store_load_barrier((const MemBarNode*)x); 2364 return post_store_load_barrier(x->as_MemBar());
2358 } 2365 }
2359 2366 }
2360 // Delicate code to detect case of an upcoming fastlock block 2367
2361 if( x->is_If() && x->req() > 1 &&
2362 !C->node_arena()->contains(x) ) { // Unmatched old-space only
2363 Node *iff = x;
2364 Node *bol = iff->in(1);
2365 // The iff might be some random subclass of If or bol might be Con-Top
2366 if (!bol->is_Bool()) return false;
2367 assert( bol->req() > 1, "" );
2368 return (bol->in(1)->Opcode() == Op_FastUnlock);
2369 }
2370 // probably not necessary to check for these 2368 // probably not necessary to check for these
2371 if (x->is_Call() || x->is_SafePoint() || x->is_block_proj()) 2369 if (x->is_Call() || x->is_SafePoint() || x->is_block_proj()) {
2372 return false; 2370 return false;
2371 }
2373 } 2372 }
2374 return false; 2373 return false;
2375 } 2374 }
2376 2375
2377 //============================================================================= 2376 //=============================================================================

mercurial