657 , _work_list(NULL) |
657 , _work_list(NULL) |
658 , _parsing_jsr(false) |
658 , _parsing_jsr(false) |
659 , _jsr_xhandlers(NULL) |
659 , _jsr_xhandlers(NULL) |
660 , _caller_stack_size(-1) |
660 , _caller_stack_size(-1) |
661 , _continuation(NULL) |
661 , _continuation(NULL) |
662 , _continuation_state(NULL) |
|
663 , _num_returns(0) |
662 , _num_returns(0) |
664 , _cleanup_block(NULL) |
663 , _cleanup_block(NULL) |
665 , _cleanup_return_prev(NULL) |
664 , _cleanup_return_prev(NULL) |
666 , _cleanup_state(NULL) |
665 , _cleanup_state(NULL) |
667 { |
666 { |
940 state->store_local(index, round_fp(x)); |
932 state->store_local(index, round_fp(x)); |
941 } |
933 } |
942 |
934 |
943 |
935 |
944 void GraphBuilder::load_indexed(BasicType type) { |
936 void GraphBuilder::load_indexed(BasicType type) { |
|
937 ValueStack* state_before = copy_state_for_exception(); |
945 Value index = ipop(); |
938 Value index = ipop(); |
946 Value array = apop(); |
939 Value array = apop(); |
947 Value length = NULL; |
940 Value length = NULL; |
948 if (CSEArrayLength || |
941 if (CSEArrayLength || |
949 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) || |
942 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) || |
950 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) { |
943 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) { |
951 length = append(new ArrayLength(array, lock_stack())); |
944 length = append(new ArrayLength(array, state_before)); |
952 } |
945 } |
953 push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, lock_stack()))); |
946 push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before))); |
954 } |
947 } |
955 |
948 |
956 |
949 |
957 void GraphBuilder::store_indexed(BasicType type) { |
950 void GraphBuilder::store_indexed(BasicType type) { |
|
951 ValueStack* state_before = copy_state_for_exception(); |
958 Value value = pop(as_ValueType(type)); |
952 Value value = pop(as_ValueType(type)); |
959 Value index = ipop(); |
953 Value index = ipop(); |
960 Value array = apop(); |
954 Value array = apop(); |
961 Value length = NULL; |
955 Value length = NULL; |
962 if (CSEArrayLength || |
956 if (CSEArrayLength || |
963 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) || |
957 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) || |
964 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) { |
958 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) { |
965 length = append(new ArrayLength(array, lock_stack())); |
959 length = append(new ArrayLength(array, state_before)); |
966 } |
960 } |
967 StoreIndexed* result = new StoreIndexed(array, index, length, type, value, lock_stack()); |
961 StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before); |
968 append(result); |
962 append(result); |
969 _memory->store_value(value); |
963 _memory->store_value(value); |
970 |
964 |
971 if (type == T_OBJECT && is_profiling()) { |
965 if (type == T_OBJECT && is_profiling()) { |
972 // Note that we'd collect profile data in this method if we wanted it. |
966 // Note that we'd collect profile data in this method if we wanted it. |
1061 break; |
1055 break; |
1062 } |
1056 } |
1063 } |
1057 } |
1064 |
1058 |
1065 |
1059 |
1066 void GraphBuilder::arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* stack) { |
1060 void GraphBuilder::arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* state_before) { |
1067 Value y = pop(type); |
1061 Value y = pop(type); |
1068 Value x = pop(type); |
1062 Value x = pop(type); |
1069 // NOTE: strictfp can be queried from current method since we don't |
1063 // NOTE: strictfp can be queried from current method since we don't |
1070 // inline methods with differing strictfp bits |
1064 // inline methods with differing strictfp bits |
1071 Value res = new ArithmeticOp(code, x, y, method()->is_strict(), stack); |
1065 Value res = new ArithmeticOp(code, x, y, method()->is_strict(), state_before); |
1072 // Note: currently single-precision floating-point rounding on Intel is handled at the LIRGenerator level |
1066 // Note: currently single-precision floating-point rounding on Intel is handled at the LIRGenerator level |
1073 res = append(res); |
1067 res = append(res); |
1074 if (method()->is_strict()) { |
1068 if (method()->is_strict()) { |
1075 res = round_fp(res); |
1069 res = round_fp(res); |
1076 } |
1070 } |
1130 push(type, append(new LogicOp(code, x, y))); |
1124 push(type, append(new LogicOp(code, x, y))); |
1131 } |
1125 } |
1132 |
1126 |
1133 |
1127 |
1134 void GraphBuilder::compare_op(ValueType* type, Bytecodes::Code code) { |
1128 void GraphBuilder::compare_op(ValueType* type, Bytecodes::Code code) { |
1135 ValueStack* state_before = state()->copy(); |
1129 ValueStack* state_before = copy_state_before(); |
1136 Value y = pop(type); |
1130 Value y = pop(type); |
1137 Value x = pop(type); |
1131 Value x = pop(type); |
1138 ipush(append(new CompareOp(code, x, y, state_before))); |
1132 ipush(append(new CompareOp(code, x, y, state_before))); |
1139 } |
1133 } |
1140 |
1134 |
1215 } |
1209 } |
1216 |
1210 |
1217 |
1211 |
1218 void GraphBuilder::if_zero(ValueType* type, If::Condition cond) { |
1212 void GraphBuilder::if_zero(ValueType* type, If::Condition cond) { |
1219 Value y = append(new Constant(intZero)); |
1213 Value y = append(new Constant(intZero)); |
1220 ValueStack* state_before = state()->copy(); |
1214 ValueStack* state_before = copy_state_before(); |
1221 Value x = ipop(); |
1215 Value x = ipop(); |
1222 if_node(x, cond, y, state_before); |
1216 if_node(x, cond, y, state_before); |
1223 } |
1217 } |
1224 |
1218 |
1225 |
1219 |
1226 void GraphBuilder::if_null(ValueType* type, If::Condition cond) { |
1220 void GraphBuilder::if_null(ValueType* type, If::Condition cond) { |
1227 Value y = append(new Constant(objectNull)); |
1221 Value y = append(new Constant(objectNull)); |
1228 ValueStack* state_before = state()->copy(); |
1222 ValueStack* state_before = copy_state_before(); |
1229 Value x = apop(); |
1223 Value x = apop(); |
1230 if_node(x, cond, y, state_before); |
1224 if_node(x, cond, y, state_before); |
1231 } |
1225 } |
1232 |
1226 |
1233 |
1227 |
1234 void GraphBuilder::if_same(ValueType* type, If::Condition cond) { |
1228 void GraphBuilder::if_same(ValueType* type, If::Condition cond) { |
1235 ValueStack* state_before = state()->copy(); |
1229 ValueStack* state_before = copy_state_before(); |
1236 Value y = pop(type); |
1230 Value y = pop(type); |
1237 Value x = pop(type); |
1231 Value x = pop(type); |
1238 if_node(x, cond, y, state_before); |
1232 if_node(x, cond, y, state_before); |
1239 } |
1233 } |
1240 |
1234 |
1280 // can handle canonicalized forms that contain more than one node. |
1274 // can handle canonicalized forms that contain more than one node. |
1281 Value key = append(new Constant(new IntConstant(switch_->low_key()))); |
1275 Value key = append(new Constant(new IntConstant(switch_->low_key()))); |
1282 BlockBegin* tsux = block_at(bci() + switch_->dest_offset_at(0)); |
1276 BlockBegin* tsux = block_at(bci() + switch_->dest_offset_at(0)); |
1283 BlockBegin* fsux = block_at(bci() + switch_->default_offset()); |
1277 BlockBegin* fsux = block_at(bci() + switch_->default_offset()); |
1284 bool is_bb = tsux->bci() < bci() || fsux->bci() < bci(); |
1278 bool is_bb = tsux->bci() < bci() || fsux->bci() < bci(); |
1285 ValueStack* state_before = is_bb ? state() : NULL; |
1279 ValueStack* state_before = is_bb ? copy_state_before() : NULL; |
1286 append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb)); |
1280 append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb)); |
1287 } else { |
1281 } else { |
1288 // collect successors |
1282 // collect successors |
1289 BlockList* sux = new BlockList(l + 1, NULL); |
1283 BlockList* sux = new BlockList(l + 1, NULL); |
1290 int i; |
1284 int i; |
1293 sux->at_put(i, block_at(bci() + switch_->dest_offset_at(i))); |
1287 sux->at_put(i, block_at(bci() + switch_->dest_offset_at(i))); |
1294 if (switch_->dest_offset_at(i) < 0) has_bb = true; |
1288 if (switch_->dest_offset_at(i) < 0) has_bb = true; |
1295 } |
1289 } |
1296 // add default successor |
1290 // add default successor |
1297 sux->at_put(i, block_at(bci() + switch_->default_offset())); |
1291 sux->at_put(i, block_at(bci() + switch_->default_offset())); |
1298 ValueStack* state_before = has_bb ? state() : NULL; |
1292 ValueStack* state_before = has_bb ? copy_state_before() : NULL; |
1299 append(new TableSwitch(ipop(), sux, switch_->low_key(), state_before, has_bb)); |
1293 append(new TableSwitch(ipop(), sux, switch_->low_key(), state_before, has_bb)); |
1300 } |
1294 } |
1301 } |
1295 } |
1302 |
1296 |
1303 |
1297 |
1312 LookupswitchPair* pair = switch_->pair_at(0); |
1306 LookupswitchPair* pair = switch_->pair_at(0); |
1313 Value key = append(new Constant(new IntConstant(pair->match()))); |
1307 Value key = append(new Constant(new IntConstant(pair->match()))); |
1314 BlockBegin* tsux = block_at(bci() + pair->offset()); |
1308 BlockBegin* tsux = block_at(bci() + pair->offset()); |
1315 BlockBegin* fsux = block_at(bci() + switch_->default_offset()); |
1309 BlockBegin* fsux = block_at(bci() + switch_->default_offset()); |
1316 bool is_bb = tsux->bci() < bci() || fsux->bci() < bci(); |
1310 bool is_bb = tsux->bci() < bci() || fsux->bci() < bci(); |
1317 ValueStack* state_before = is_bb ? state() : NULL; |
1311 ValueStack* state_before = is_bb ? copy_state_before() : NULL; |
1318 append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb)); |
1312 append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb)); |
1319 } else { |
1313 } else { |
1320 // collect successors & keys |
1314 // collect successors & keys |
1321 BlockList* sux = new BlockList(l + 1, NULL); |
1315 BlockList* sux = new BlockList(l + 1, NULL); |
1322 intArray* keys = new intArray(l, 0); |
1316 intArray* keys = new intArray(l, 0); |
1328 sux->at_put(i, block_at(bci() + pair->offset())); |
1322 sux->at_put(i, block_at(bci() + pair->offset())); |
1329 keys->at_put(i, pair->match()); |
1323 keys->at_put(i, pair->match()); |
1330 } |
1324 } |
1331 // add default successor |
1325 // add default successor |
1332 sux->at_put(i, block_at(bci() + switch_->default_offset())); |
1326 sux->at_put(i, block_at(bci() + switch_->default_offset())); |
1333 ValueStack* state_before = has_bb ? state() : NULL; |
1327 ValueStack* state_before = has_bb ? copy_state_before() : NULL; |
1334 append(new LookupSwitch(ipop(), sux, keys, state_before, has_bb)); |
1328 append(new LookupSwitch(ipop(), sux, keys, state_before, has_bb)); |
1335 } |
1329 } |
1336 } |
1330 } |
1337 |
1331 |
1338 void GraphBuilder::call_register_finalizer() { |
1332 void GraphBuilder::call_register_finalizer() { |
1339 // If the receiver requires finalization then emit code to perform |
1333 // If the receiver requires finalization then emit code to perform |
1340 // the registration on return. |
1334 // the registration on return. |
1341 |
1335 |
1342 // Gather some type information about the receiver |
1336 // Gather some type information about the receiver |
1343 Value receiver = state()->load_local(0); |
1337 Value receiver = state()->local_at(0); |
1344 assert(receiver != NULL, "must have a receiver"); |
1338 assert(receiver != NULL, "must have a receiver"); |
1345 ciType* declared_type = receiver->declared_type(); |
1339 ciType* declared_type = receiver->declared_type(); |
1346 ciType* exact_type = receiver->exact_type(); |
1340 ciType* exact_type = receiver->exact_type(); |
1347 if (exact_type == NULL && |
1341 if (exact_type == NULL && |
1348 receiver->as_Local() && |
1342 receiver->as_Local() && |
1371 } |
1365 } |
1372 } |
1366 } |
1373 |
1367 |
1374 if (needs_check) { |
1368 if (needs_check) { |
1375 // Perform the registration of finalizable objects. |
1369 // Perform the registration of finalizable objects. |
|
1370 ValueStack* state_before = copy_state_for_exception(); |
1376 load_local(objectType, 0); |
1371 load_local(objectType, 0); |
1377 append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init, |
1372 append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init, |
1378 state()->pop_arguments(1), |
1373 state()->pop_arguments(1), |
1379 true, lock_stack(), true)); |
1374 true, state_before, true)); |
1380 } |
1375 } |
1381 } |
1376 } |
1382 |
1377 |
1383 |
1378 |
1384 void GraphBuilder::method_return(Value x) { |
1379 void GraphBuilder::method_return(Value x) { |
1393 assert(!method()->is_synchronized() || InlineSynchronizedMethods, "can not inline synchronized methods yet"); |
1388 assert(!method()->is_synchronized() || InlineSynchronizedMethods, "can not inline synchronized methods yet"); |
1394 |
1389 |
1395 // If the inlined method is synchronized, the monitor must be |
1390 // If the inlined method is synchronized, the monitor must be |
1396 // released before we jump to the continuation block. |
1391 // released before we jump to the continuation block. |
1397 if (method()->is_synchronized()) { |
1392 if (method()->is_synchronized()) { |
1398 int i = state()->caller_state()->locks_size(); |
1393 assert(state()->locks_size() == 1, "receiver must be locked here"); |
1399 assert(state()->locks_size() == i + 1, "receiver must be locked here"); |
1394 monitorexit(state()->lock_at(0), SynchronizationEntryBCI); |
1400 monitorexit(state()->lock_at(i), SynchronizationEntryBCI); |
1395 } |
1401 } |
1396 |
1402 |
1397 // State at end of inlined method is the state of the caller |
1403 state()->truncate_stack(caller_stack_size()); |
1398 // without the method parameters on stack, including the |
|
1399 // return value, if any, of the inlined method on operand stack. |
|
1400 set_state(state()->caller_state()->copy_for_parsing()); |
1404 if (x != NULL) { |
1401 if (x != NULL) { |
1405 state()->push(x->type(), x); |
1402 state()->push(x->type(), x); |
1406 } |
1403 } |
1407 Goto* goto_callee = new Goto(continuation(), false); |
1404 Goto* goto_callee = new Goto(continuation(), false); |
1408 |
1405 |
1409 // See whether this is the first return; if so, store off some |
1406 // See whether this is the first return; if so, store off some |
1410 // of the state for later examination |
1407 // of the state for later examination |
1411 if (num_returns() == 0) { |
1408 if (num_returns() == 0) { |
1412 set_inline_cleanup_info(_block, _last, state()); |
1409 set_inline_cleanup_info(_block, _last, state()); |
1413 } |
|
1414 |
|
1415 // State at end of inlined method is the state of the caller |
|
1416 // without the method parameters on stack, including the |
|
1417 // return value, if any, of the inlined method on operand stack. |
|
1418 set_state(scope_data()->continuation_state()->copy()); |
|
1419 if (x) { |
|
1420 state()->push(x->type(), x); |
|
1421 } |
1410 } |
1422 |
1411 |
1423 // The current bci() is in the wrong scope, so use the bci() of |
1412 // The current bci() is in the wrong scope, so use the bci() of |
1424 // the continuation point. |
1413 // the continuation point. |
1425 append_with_bci(goto_callee, scope_data()->continuation()->bci()); |
1414 append_with_bci(goto_callee, scope_data()->continuation()->bci()); |
1453 // call will_link again to determine if the field is valid. |
1442 // call will_link again to determine if the field is valid. |
1454 const bool is_loaded = holder->is_loaded() && |
1443 const bool is_loaded = holder->is_loaded() && |
1455 field->will_link(method()->holder(), code); |
1444 field->will_link(method()->holder(), code); |
1456 const bool is_initialized = is_loaded && holder->is_initialized(); |
1445 const bool is_initialized = is_loaded && holder->is_initialized(); |
1457 |
1446 |
1458 ValueStack* state_copy = NULL; |
1447 ValueStack* state_before = NULL; |
1459 if (!is_initialized || PatchALot) { |
1448 if (!is_initialized || PatchALot) { |
1460 // save state before instruction for debug info when |
1449 // save state before instruction for debug info when |
1461 // deoptimization happens during patching |
1450 // deoptimization happens during patching |
1462 state_copy = state()->copy(); |
1451 state_before = copy_state_before(); |
1463 } |
1452 } |
1464 |
1453 |
1465 Value obj = NULL; |
1454 Value obj = NULL; |
1466 if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) { |
1455 if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) { |
1467 // commoning of class constants should only occur if the class is |
1456 // commoning of class constants should only occur if the class is |
1468 // fully initialized and resolved in this constant pool. The will_link test |
1457 // fully initialized and resolved in this constant pool. The will_link test |
1469 // above essentially checks if this class is resolved in this constant pool |
1458 // above essentially checks if this class is resolved in this constant pool |
1470 // so, the is_initialized flag should be suffiect. |
1459 // so, the is_initialized flag should be suffiect. |
1471 if (state_copy != NULL) { |
1460 if (state_before != NULL) { |
1472 // build a patching constant |
1461 // build a patching constant |
1473 obj = new Constant(new ClassConstant(holder), state_copy); |
1462 obj = new Constant(new ClassConstant(holder), state_before); |
1474 } else { |
1463 } else { |
1475 obj = new Constant(new ClassConstant(holder)); |
1464 obj = new Constant(new ClassConstant(holder)); |
1476 } |
1465 } |
1477 } |
1466 } |
1478 |
1467 |
1497 constant = new Constant(as_ValueType(field_val)); |
1486 constant = new Constant(as_ValueType(field_val)); |
1498 } |
1487 } |
1499 } |
1488 } |
1500 if (constant != NULL) { |
1489 if (constant != NULL) { |
1501 push(type, append(constant)); |
1490 push(type, append(constant)); |
1502 state_copy = NULL; // Not a potential deoptimization point (see set_state_before logic below) |
|
1503 } else { |
1491 } else { |
|
1492 if (state_before == NULL) { |
|
1493 state_before = copy_state_for_exception(); |
|
1494 } |
1504 push(type, append(new LoadField(append(obj), offset, field, true, |
1495 push(type, append(new LoadField(append(obj), offset, field, true, |
1505 lock_stack(), state_copy, is_loaded, is_initialized))); |
1496 state_before, is_loaded, is_initialized))); |
1506 } |
1497 } |
1507 break; |
1498 break; |
1508 } |
1499 } |
1509 case Bytecodes::_putstatic: |
1500 case Bytecodes::_putstatic: |
1510 { Value val = pop(type); |
1501 { Value val = pop(type); |
1511 append(new StoreField(append(obj), offset, field, val, true, lock_stack(), state_copy, is_loaded, is_initialized)); |
1502 if (state_before == NULL) { |
|
1503 state_before = copy_state_for_exception(); |
|
1504 } |
|
1505 append(new StoreField(append(obj), offset, field, val, true, state_before, is_loaded, is_initialized)); |
1512 } |
1506 } |
1513 break; |
1507 break; |
1514 case Bytecodes::_getfield : |
1508 case Bytecodes::_getfield : |
1515 { |
1509 { |
1516 LoadField* load = new LoadField(apop(), offset, field, false, lock_stack(), state_copy, is_loaded, true); |
1510 if (state_before == NULL) { |
|
1511 state_before = copy_state_for_exception(); |
|
1512 } |
|
1513 LoadField* load = new LoadField(apop(), offset, field, false, state_before, is_loaded, true); |
1517 Value replacement = is_loaded ? _memory->load(load) : load; |
1514 Value replacement = is_loaded ? _memory->load(load) : load; |
1518 if (replacement != load) { |
1515 if (replacement != load) { |
1519 assert(replacement->bci() != -99 || replacement->as_Phi() || replacement->as_Local(), |
1516 assert(replacement->is_linked() || !replacement->can_be_linked(), "should already by linked"); |
1520 "should already by linked"); |
|
1521 push(type, replacement); |
1517 push(type, replacement); |
1522 } else { |
1518 } else { |
1523 push(type, append(load)); |
1519 push(type, append(load)); |
1524 } |
1520 } |
1525 break; |
1521 break; |
1526 } |
1522 } |
1527 |
1523 |
1528 case Bytecodes::_putfield : |
1524 case Bytecodes::_putfield : |
1529 { Value val = pop(type); |
1525 { Value val = pop(type); |
1530 StoreField* store = new StoreField(apop(), offset, field, val, false, lock_stack(), state_copy, is_loaded, true); |
1526 if (state_before == NULL) { |
|
1527 state_before = copy_state_for_exception(); |
|
1528 } |
|
1529 StoreField* store = new StoreField(apop(), offset, field, val, false, state_before, is_loaded, true); |
1531 if (is_loaded) store = _memory->store(store); |
1530 if (is_loaded) store = _memory->store(store); |
1532 if (store != NULL) { |
1531 if (store != NULL) { |
1533 append(store); |
1532 append(store); |
1534 } |
1533 } |
1535 } |
1534 } |
1730 bool is_invokedynamic = code == Bytecodes::_invokedynamic; |
1729 bool is_invokedynamic = code == Bytecodes::_invokedynamic; |
1731 ValueType* result_type = as_ValueType(target->return_type()); |
1730 ValueType* result_type = as_ValueType(target->return_type()); |
1732 |
1731 |
1733 // We require the debug info to be the "state before" because |
1732 // We require the debug info to be the "state before" because |
1734 // invokedynamics may deoptimize. |
1733 // invokedynamics may deoptimize. |
1735 ValueStack* state_before = is_invokedynamic ? state()->copy() : NULL; |
1734 ValueStack* state_before = is_invokedynamic ? copy_state_before() : copy_state_exhandling(); |
1736 |
1735 |
1737 Values* args = state()->pop_arguments(target->arg_size_no_receiver()); |
1736 Values* args = state()->pop_arguments(target->arg_size_no_receiver()); |
1738 Value recv = has_receiver ? apop() : NULL; |
1737 Value recv = has_receiver ? apop() : NULL; |
1739 int vtable_index = methodOopDesc::invalid_vtable_index; |
1738 int vtable_index = methodOopDesc::invalid_vtable_index; |
1740 |
1739 |
1793 } |
1792 } |
1794 } |
1793 } |
1795 |
1794 |
1796 |
1795 |
1797 void GraphBuilder::new_instance(int klass_index) { |
1796 void GraphBuilder::new_instance(int klass_index) { |
|
1797 ValueStack* state_before = copy_state_exhandling(); |
1798 bool will_link; |
1798 bool will_link; |
1799 ciKlass* klass = stream()->get_klass(will_link); |
1799 ciKlass* klass = stream()->get_klass(will_link); |
1800 assert(klass->is_instance_klass(), "must be an instance klass"); |
1800 assert(klass->is_instance_klass(), "must be an instance klass"); |
1801 NewInstance* new_instance = new NewInstance(klass->as_instance_klass()); |
1801 NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before); |
1802 _memory->new_instance(new_instance); |
1802 _memory->new_instance(new_instance); |
1803 apush(append_split(new_instance)); |
1803 apush(append_split(new_instance)); |
1804 } |
1804 } |
1805 |
1805 |
1806 |
1806 |
1807 void GraphBuilder::new_type_array() { |
1807 void GraphBuilder::new_type_array() { |
1808 apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index()))); |
1808 ValueStack* state_before = copy_state_exhandling(); |
|
1809 apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before))); |
1809 } |
1810 } |
1810 |
1811 |
1811 |
1812 |
1812 void GraphBuilder::new_object_array() { |
1813 void GraphBuilder::new_object_array() { |
1813 bool will_link; |
1814 bool will_link; |
1814 ciKlass* klass = stream()->get_klass(will_link); |
1815 ciKlass* klass = stream()->get_klass(will_link); |
1815 ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL; |
1816 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling(); |
1816 NewArray* n = new NewObjectArray(klass, ipop(), state_before); |
1817 NewArray* n = new NewObjectArray(klass, ipop(), state_before); |
1817 apush(append_split(n)); |
1818 apush(append_split(n)); |
1818 } |
1819 } |
1819 |
1820 |
1820 |
1821 |
1836 |
1837 |
1837 |
1838 |
1838 void GraphBuilder::check_cast(int klass_index) { |
1839 void GraphBuilder::check_cast(int klass_index) { |
1839 bool will_link; |
1840 bool will_link; |
1840 ciKlass* klass = stream()->get_klass(will_link); |
1841 ciKlass* klass = stream()->get_klass(will_link); |
1841 ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL; |
1842 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception(); |
1842 CheckCast* c = new CheckCast(klass, apop(), state_before); |
1843 CheckCast* c = new CheckCast(klass, apop(), state_before); |
1843 apush(append_split(c)); |
1844 apush(append_split(c)); |
1844 c->set_direct_compare(direct_compare(klass)); |
1845 c->set_direct_compare(direct_compare(klass)); |
1845 |
1846 |
1846 if (is_profiling()) { |
1847 if (is_profiling()) { |
1857 |
1858 |
1858 |
1859 |
1859 void GraphBuilder::instance_of(int klass_index) { |
1860 void GraphBuilder::instance_of(int klass_index) { |
1860 bool will_link; |
1861 bool will_link; |
1861 ciKlass* klass = stream()->get_klass(will_link); |
1862 ciKlass* klass = stream()->get_klass(will_link); |
1862 ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL; |
1863 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling(); |
1863 InstanceOf* i = new InstanceOf(klass, apop(), state_before); |
1864 InstanceOf* i = new InstanceOf(klass, apop(), state_before); |
1864 ipush(append_split(i)); |
1865 ipush(append_split(i)); |
1865 i->set_direct_compare(direct_compare(klass)); |
1866 i->set_direct_compare(direct_compare(klass)); |
1866 |
1867 |
1867 if (is_profiling()) { |
1868 if (is_profiling()) { |
1877 } |
1878 } |
1878 |
1879 |
1879 |
1880 |
1880 void GraphBuilder::monitorenter(Value x, int bci) { |
1881 void GraphBuilder::monitorenter(Value x, int bci) { |
1881 // save state before locking in case of deoptimization after a NullPointerException |
1882 // save state before locking in case of deoptimization after a NullPointerException |
1882 ValueStack* lock_stack_before = lock_stack(); |
1883 ValueStack* state_before = copy_state_for_exception_with_bci(bci); |
1883 append_with_bci(new MonitorEnter(x, state()->lock(scope(), x), lock_stack_before), bci); |
1884 append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci); |
1884 kill_all(); |
1885 kill_all(); |
1885 } |
1886 } |
1886 |
1887 |
1887 |
1888 |
1888 void GraphBuilder::monitorexit(Value x, int bci) { |
1889 void GraphBuilder::monitorexit(Value x, int bci) { |
1889 // Note: the comment below is only relevant for the case where we do |
|
1890 // not deoptimize due to asynchronous exceptions (!(DeoptC1 && |
|
1891 // DeoptOnAsyncException), which is not used anymore) |
|
1892 |
|
1893 // Note: Potentially, the monitor state in an exception handler |
|
1894 // can be wrong due to wrong 'initialization' of the handler |
|
1895 // via a wrong asynchronous exception path. This can happen, |
|
1896 // if the exception handler range for asynchronous exceptions |
|
1897 // is too long (see also java bug 4327029, and comment in |
|
1898 // GraphBuilder::handle_exception()). This may cause 'under- |
|
1899 // flow' of the monitor stack => bailout instead. |
|
1900 if (state()->locks_size() < 1) BAILOUT("monitor stack underflow"); |
|
1901 append_with_bci(new MonitorExit(x, state()->unlock()), bci); |
1890 append_with_bci(new MonitorExit(x, state()->unlock()), bci); |
1902 kill_all(); |
1891 kill_all(); |
1903 } |
1892 } |
1904 |
1893 |
1905 |
1894 |
1906 void GraphBuilder::new_multi_array(int dimensions) { |
1895 void GraphBuilder::new_multi_array(int dimensions) { |
1907 bool will_link; |
1896 bool will_link; |
1908 ciKlass* klass = stream()->get_klass(will_link); |
1897 ciKlass* klass = stream()->get_klass(will_link); |
1909 ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL; |
1898 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling(); |
1910 |
1899 |
1911 Values* dims = new Values(dimensions, NULL); |
1900 Values* dims = new Values(dimensions, NULL); |
1912 // fill in all dimensions |
1901 // fill in all dimensions |
1913 int i = dimensions; |
1902 int i = dimensions; |
1914 while (i-- > 0) dims->at_put(i, ipop()); |
1903 while (i-- > 0) dims->at_put(i, ipop()); |
1919 |
1908 |
1920 |
1909 |
1921 void GraphBuilder::throw_op(int bci) { |
1910 void GraphBuilder::throw_op(int bci) { |
1922 // We require that the debug info for a Throw be the "state before" |
1911 // We require that the debug info for a Throw be the "state before" |
1923 // the Throw (i.e., exception oop is still on TOS) |
1912 // the Throw (i.e., exception oop is still on TOS) |
1924 ValueStack* state_before = state()->copy(); |
1913 ValueStack* state_before = copy_state_before_with_bci(bci); |
1925 Throw* t = new Throw(apop(), state_before); |
1914 Throw* t = new Throw(apop(), state_before); |
|
1915 // operand stack not needed after a throw |
|
1916 state()->truncate_stack(0); |
1926 append_with_bci(t, bci); |
1917 append_with_bci(t, bci); |
1927 } |
1918 } |
1928 |
1919 |
1929 |
1920 |
1930 Value GraphBuilder::round_fp(Value fp_value) { |
1921 Value GraphBuilder::round_fp(Value fp_value) { |
1945 |
1936 |
1946 |
1937 |
1947 Instruction* GraphBuilder::append_with_bci(Instruction* instr, int bci) { |
1938 Instruction* GraphBuilder::append_with_bci(Instruction* instr, int bci) { |
1948 Canonicalizer canon(compilation(), instr, bci); |
1939 Canonicalizer canon(compilation(), instr, bci); |
1949 Instruction* i1 = canon.canonical(); |
1940 Instruction* i1 = canon.canonical(); |
1950 if (i1->bci() != -99) { |
1941 if (i1->is_linked() || !i1->can_be_linked()) { |
1951 // Canonicalizer returned an instruction which was already |
1942 // Canonicalizer returned an instruction which was already |
1952 // appended so simply return it. |
1943 // appended so simply return it. |
1953 return i1; |
1944 return i1; |
1954 } else if (UseLocalValueNumbering) { |
1945 } |
|
1946 |
|
1947 if (UseLocalValueNumbering) { |
1955 // Lookup the instruction in the ValueMap and add it to the map if |
1948 // Lookup the instruction in the ValueMap and add it to the map if |
1956 // it's not found. |
1949 // it's not found. |
1957 Instruction* i2 = vmap()->find_insert(i1); |
1950 Instruction* i2 = vmap()->find_insert(i1); |
1958 if (i2 != i1) { |
1951 if (i2 != i1) { |
1959 // found an entry in the value map, so just return it. |
1952 // found an entry in the value map, so just return it. |
1960 assert(i2->bci() != -1, "should already be linked"); |
1953 assert(i2->is_linked(), "should already be linked"); |
1961 return i2; |
1954 return i2; |
1962 } |
1955 } |
1963 ValueNumberingEffects vne(vmap()); |
1956 ValueNumberingEffects vne(vmap()); |
1964 i1->visit(&vne); |
1957 i1->visit(&vne); |
1965 } |
1958 } |
1966 |
1959 |
1967 if (i1->as_Phi() == NULL && i1->as_Local() == NULL) { |
1960 // i1 was not eliminated => append it |
1968 // i1 was not eliminated => append it |
1961 assert(i1->next() == NULL, "shouldn't already be linked"); |
1969 assert(i1->next() == NULL, "shouldn't already be linked"); |
1962 _last = _last->set_next(i1, canon.bci()); |
1970 _last = _last->set_next(i1, canon.bci()); |
1963 |
1971 if (++_instruction_count >= InstructionCountCutoff |
1964 if (++_instruction_count >= InstructionCountCutoff && !bailed_out()) { |
1972 && !bailed_out()) { |
1965 // set the bailout state but complete normal processing. We |
1973 // set the bailout state but complete normal processing. We |
1966 // might do a little more work before noticing the bailout so we |
1974 // might do a little more work before noticing the bailout so we |
1967 // want processing to continue normally until it's noticed. |
1975 // want processing to continue normally until it's noticed. |
1968 bailout("Method and/or inlining is too large"); |
1976 bailout("Method and/or inlining is too large"); |
1969 } |
1977 } |
|
1978 |
1970 |
1979 #ifndef PRODUCT |
1971 #ifndef PRODUCT |
1980 if (PrintIRDuringConstruction) { |
1972 if (PrintIRDuringConstruction) { |
1981 InstructionPrinter ip; |
1973 InstructionPrinter ip; |
1982 ip.print_line(i1); |
1974 ip.print_line(i1); |
1983 if (Verbose) { |
1975 if (Verbose) { |
1984 state()->print(); |
1976 state()->print(); |
1985 } |
1977 } |
1986 } |
1978 } |
1987 #endif |
1979 #endif |
1988 assert(_last == i1, "adjust code below"); |
1980 |
1989 StateSplit* s = i1->as_StateSplit(); |
1981 // save state after modification of operand stack for StateSplit instructions |
1990 if (s != NULL && i1->as_BlockEnd() == NULL) { |
1982 StateSplit* s = i1->as_StateSplit(); |
1991 if (EliminateFieldAccess) { |
1983 if (s != NULL) { |
1992 Intrinsic* intrinsic = s->as_Intrinsic(); |
1984 if (EliminateFieldAccess) { |
1993 if (s->as_Invoke() != NULL || (intrinsic && !intrinsic->preserves_state())) { |
1985 Intrinsic* intrinsic = s->as_Intrinsic(); |
1994 _memory->kill(); |
1986 if (s->as_Invoke() != NULL || (intrinsic && !intrinsic->preserves_state())) { |
1995 } |
1987 _memory->kill(); |
1996 } |
1988 } |
1997 s->set_state(state()->copy()); |
1989 } |
1998 } |
1990 s->set_state(state()->copy(ValueStack::StateAfter, canon.bci())); |
1999 // set up exception handlers for this instruction if necessary |
1991 } |
2000 if (i1->can_trap()) { |
1992 |
2001 assert(exception_state() != NULL || !has_handler(), "must have setup exception state"); |
1993 // set up exception handlers for this instruction if necessary |
2002 i1->set_exception_handlers(handle_exception(bci)); |
1994 if (i1->can_trap()) { |
2003 } |
1995 i1->set_exception_handlers(handle_exception(i1)); |
|
1996 assert(i1->exception_state() != NULL || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state"); |
2004 } |
1997 } |
2005 return i1; |
1998 return i1; |
2006 } |
1999 } |
2007 |
2000 |
2008 |
2001 |
2030 return; |
2023 return; |
2031 } |
2024 } |
2032 } |
2025 } |
2033 } |
2026 } |
2034 } |
2027 } |
2035 append(new NullCheck(value, lock_stack())); |
2028 append(new NullCheck(value, copy_state_for_exception())); |
2036 } |
2029 } |
2037 |
2030 |
2038 |
2031 |
2039 |
2032 |
2040 XHandlers* GraphBuilder::handle_exception(int cur_bci) { |
2033 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) { |
2041 // fast path if it is guaranteed that no exception handlers are present |
2034 if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != NULL)) { |
2042 if (!has_handler()) { |
2035 assert(instruction->exception_state() == NULL |
2043 // TODO: check if return NULL is possible (avoids empty lists) |
2036 || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState |
|
2037 || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->jvmti_can_access_local_variables()), |
|
2038 "exception_state should be of exception kind"); |
2044 return new XHandlers(); |
2039 return new XHandlers(); |
2045 } |
2040 } |
2046 |
2041 |
2047 XHandlers* exception_handlers = new XHandlers(); |
2042 XHandlers* exception_handlers = new XHandlers(); |
2048 ScopeData* cur_scope_data = scope_data(); |
2043 ScopeData* cur_scope_data = scope_data(); |
2049 ValueStack* s = exception_state(); |
2044 ValueStack* cur_state = instruction->state_before(); |
|
2045 ValueStack* prev_state = NULL; |
2050 int scope_count = 0; |
2046 int scope_count = 0; |
2051 |
2047 |
2052 assert(s != NULL, "exception state must be set"); |
2048 assert(cur_state != NULL, "state_before must be set"); |
2053 do { |
2049 do { |
2054 assert(cur_scope_data->scope() == s->scope(), "scopes do not match"); |
2050 int cur_bci = cur_state->bci(); |
|
2051 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match"); |
2055 assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci"); |
2052 assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci"); |
2056 |
2053 |
2057 // join with all potential exception handlers |
2054 // join with all potential exception handlers |
2058 XHandlers* list = cur_scope_data->xhandlers(); |
2055 XHandlers* list = cur_scope_data->xhandlers(); |
2059 const int n = list->length(); |
2056 const int n = list->length(); |
2073 assert(entry->bci() == h->handler_bci(), "must match"); |
2070 assert(entry->bci() == h->handler_bci(), "must match"); |
2074 assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond"); |
2071 assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond"); |
2075 |
2072 |
2076 // previously this was a BAILOUT, but this is not necessary |
2073 // previously this was a BAILOUT, but this is not necessary |
2077 // now because asynchronous exceptions are not handled this way. |
2074 // now because asynchronous exceptions are not handled this way. |
2078 assert(entry->state() == NULL || s->locks_size() == entry->state()->locks_size(), "locks do not match"); |
2075 assert(entry->state() == NULL || cur_state->total_locks_size() == entry->state()->total_locks_size(), "locks do not match"); |
2079 |
2076 |
2080 // xhandler start with an empty expression stack |
2077 // xhandler start with an empty expression stack |
2081 s->truncate_stack(cur_scope_data->caller_stack_size()); |
2078 if (cur_state->stack_size() != 0) { |
|
2079 cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci()); |
|
2080 } |
|
2081 if (instruction->exception_state() == NULL) { |
|
2082 instruction->set_exception_state(cur_state); |
|
2083 } |
2082 |
2084 |
2083 // Note: Usually this join must work. However, very |
2085 // Note: Usually this join must work. However, very |
2084 // complicated jsr-ret structures where we don't ret from |
2086 // complicated jsr-ret structures where we don't ret from |
2085 // the subroutine can cause the objects on the monitor |
2087 // the subroutine can cause the objects on the monitor |
2086 // stacks to not match because blocks can be parsed twice. |
2088 // stacks to not match because blocks can be parsed twice. |
2087 // The only test case we've seen so far which exhibits this |
2089 // The only test case we've seen so far which exhibits this |
2088 // problem is caught by the infinite recursion test in |
2090 // problem is caught by the infinite recursion test in |
2089 // GraphBuilder::jsr() if the join doesn't work. |
2091 // GraphBuilder::jsr() if the join doesn't work. |
2090 if (!entry->try_merge(s)) { |
2092 if (!entry->try_merge(cur_state)) { |
2091 BAILOUT_("error while joining with exception handler, prob. due to complicated jsr/rets", exception_handlers); |
2093 BAILOUT_("error while joining with exception handler, prob. due to complicated jsr/rets", exception_handlers); |
2092 } |
2094 } |
2093 |
2095 |
2094 // add current state for correct handling of phi functions at begin of xhandler |
2096 // add current state for correct handling of phi functions at begin of xhandler |
2095 int phi_operand = entry->add_exception_state(s); |
2097 int phi_operand = entry->add_exception_state(cur_state); |
2096 |
2098 |
2097 // add entry to the list of xhandlers of this block |
2099 // add entry to the list of xhandlers of this block |
2098 _block->add_exception_handler(entry); |
2100 _block->add_exception_handler(entry); |
2099 |
2101 |
2100 // add back-edge from xhandler entry to this block |
2102 // add back-edge from xhandler entry to this block |
2117 return exception_handlers; |
2119 return exception_handlers; |
2118 } |
2120 } |
2119 } |
2121 } |
2120 } |
2122 } |
2121 |
2123 |
|
2124 if (exception_handlers->length() == 0) { |
|
2125 // This scope and all callees do not handle exceptions, so the local |
|
2126 // variables of this scope are not needed. However, the scope itself is |
|
2127 // required for a correct exception stack trace -> clear out the locals. |
|
2128 if (_compilation->env()->jvmti_can_access_local_variables()) { |
|
2129 cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci()); |
|
2130 } else { |
|
2131 cur_state = cur_state->copy(ValueStack::EmptyExceptionState, cur_state->bci()); |
|
2132 } |
|
2133 if (prev_state != NULL) { |
|
2134 prev_state->set_caller_state(cur_state); |
|
2135 } |
|
2136 if (instruction->exception_state() == NULL) { |
|
2137 instruction->set_exception_state(cur_state); |
|
2138 } |
|
2139 } |
|
2140 |
2122 // Set up iteration for next time. |
2141 // Set up iteration for next time. |
2123 // If parsing a jsr, do not grab exception handlers from the |
2142 // If parsing a jsr, do not grab exception handlers from the |
2124 // parent scopes for this method (already got them, and they |
2143 // parent scopes for this method (already got them, and they |
2125 // needed to be cloned) |
2144 // needed to be cloned) |
2126 if (cur_scope_data->parsing_jsr()) { |
2145 |
2127 IRScope* tmp_scope = cur_scope_data->scope(); |
2146 while (cur_scope_data->parsing_jsr()) { |
2128 while (cur_scope_data->parent() != NULL && |
|
2129 cur_scope_data->parent()->scope() == tmp_scope) { |
|
2130 cur_scope_data = cur_scope_data->parent(); |
|
2131 } |
|
2132 } |
|
2133 if (cur_scope_data != NULL) { |
|
2134 if (cur_scope_data->parent() != NULL) { |
|
2135 // must use pop_scope instead of caller_state to preserve all monitors |
|
2136 s = s->pop_scope(); |
|
2137 } |
|
2138 cur_bci = cur_scope_data->scope()->caller_bci(); |
|
2139 cur_scope_data = cur_scope_data->parent(); |
2147 cur_scope_data = cur_scope_data->parent(); |
2140 scope_count++; |
2148 } |
2141 } |
2149 |
|
2150 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match"); |
|
2151 assert(cur_state->locks_size() == 0 || cur_state->locks_size() == 1, "unlocking must be done in a catchall exception handler"); |
|
2152 |
|
2153 prev_state = cur_state; |
|
2154 cur_state = cur_state->caller_state(); |
|
2155 cur_scope_data = cur_scope_data->parent(); |
|
2156 scope_count++; |
2142 } while (cur_scope_data != NULL); |
2157 } while (cur_scope_data != NULL); |
2143 |
2158 |
2144 return exception_handlers; |
2159 return exception_handlers; |
2145 } |
2160 } |
2146 |
2161 |
2241 for_each_phi_fun(b, phi, |
2256 for_each_phi_fun(b, phi, |
2242 assert(phi->operand_count() != 1 || phi->subst() != phi, "missed trivial simplification"); |
2257 assert(phi->operand_count() != 1 || phi->subst() != phi, "missed trivial simplification"); |
2243 ); |
2258 ); |
2244 |
2259 |
2245 ValueStack* state = b->state()->caller_state(); |
2260 ValueStack* state = b->state()->caller_state(); |
2246 int index; |
2261 for_each_state_value(state, value, |
2247 Value value; |
2262 Phi* phi = value->as_Phi(); |
2248 for_each_state(state) { |
2263 assert(phi == NULL || phi->block() != b, "must not have phi function to simplify in caller state"); |
2249 for_each_local_value(state, index, value) { |
2264 ); |
2250 Phi* phi = value->as_Phi(); |
|
2251 assert(phi == NULL || phi->block() != b, "must not have phi function to simplify in caller state"); |
|
2252 } |
|
2253 } |
|
2254 #endif |
2265 #endif |
2255 } |
2266 } |
2256 |
2267 |
2257 // This method is called after all blocks are filled with HIR instructions |
2268 // This method is called after all blocks are filled with HIR instructions |
2258 // It eliminates all Phi functions of the form x = [y, y] and x = [y, x] |
2269 // It eliminates all Phi functions of the form x = [y, y] and x = [y, x] |
2299 } |
2310 } |
2300 |
2311 |
2301 while (!bailed_out() && last()->as_BlockEnd() == NULL && |
2312 while (!bailed_out() && last()->as_BlockEnd() == NULL && |
2302 (code = stream()->next()) != ciBytecodeStream::EOBC() && |
2313 (code = stream()->next()) != ciBytecodeStream::EOBC() && |
2303 (block_at(s.cur_bci()) == NULL || block_at(s.cur_bci()) == block())) { |
2314 (block_at(s.cur_bci()) == NULL || block_at(s.cur_bci()) == block())) { |
2304 |
2315 assert(state()->kind() == ValueStack::Parsing, "invalid state kind"); |
2305 if (has_handler() && can_trap(method(), code)) { |
|
2306 // copy the state because it is modified before handle_exception is called |
|
2307 set_exception_state(state()->copy()); |
|
2308 } else { |
|
2309 // handle_exception is not called for this bytecode |
|
2310 set_exception_state(NULL); |
|
2311 } |
|
2312 |
2316 |
2313 // Check for active jsr during OSR compilation |
2317 // Check for active jsr during OSR compilation |
2314 if (compilation()->is_osr_compile() |
2318 if (compilation()->is_osr_compile() |
2315 && scope()->is_top_scope() |
2319 && scope()->is_top_scope() |
2316 && parsing_jsr() |
2320 && parsing_jsr() |
2431 case Bytecodes::_dsub : arithmetic_op(doubleType, code); break; |
2435 case Bytecodes::_dsub : arithmetic_op(doubleType, code); break; |
2432 case Bytecodes::_imul : arithmetic_op(intType , code); break; |
2436 case Bytecodes::_imul : arithmetic_op(intType , code); break; |
2433 case Bytecodes::_lmul : arithmetic_op(longType , code); break; |
2437 case Bytecodes::_lmul : arithmetic_op(longType , code); break; |
2434 case Bytecodes::_fmul : arithmetic_op(floatType , code); break; |
2438 case Bytecodes::_fmul : arithmetic_op(floatType , code); break; |
2435 case Bytecodes::_dmul : arithmetic_op(doubleType, code); break; |
2439 case Bytecodes::_dmul : arithmetic_op(doubleType, code); break; |
2436 case Bytecodes::_idiv : arithmetic_op(intType , code, lock_stack()); break; |
2440 case Bytecodes::_idiv : arithmetic_op(intType , code, copy_state_for_exception()); break; |
2437 case Bytecodes::_ldiv : arithmetic_op(longType , code, lock_stack()); break; |
2441 case Bytecodes::_ldiv : arithmetic_op(longType , code, copy_state_for_exception()); break; |
2438 case Bytecodes::_fdiv : arithmetic_op(floatType , code); break; |
2442 case Bytecodes::_fdiv : arithmetic_op(floatType , code); break; |
2439 case Bytecodes::_ddiv : arithmetic_op(doubleType, code); break; |
2443 case Bytecodes::_ddiv : arithmetic_op(doubleType, code); break; |
2440 case Bytecodes::_irem : arithmetic_op(intType , code, lock_stack()); break; |
2444 case Bytecodes::_irem : arithmetic_op(intType , code, copy_state_for_exception()); break; |
2441 case Bytecodes::_lrem : arithmetic_op(longType , code, lock_stack()); break; |
2445 case Bytecodes::_lrem : arithmetic_op(longType , code, copy_state_for_exception()); break; |
2442 case Bytecodes::_frem : arithmetic_op(floatType , code); break; |
2446 case Bytecodes::_frem : arithmetic_op(floatType , code); break; |
2443 case Bytecodes::_drem : arithmetic_op(doubleType, code); break; |
2447 case Bytecodes::_drem : arithmetic_op(doubleType, code); break; |
2444 case Bytecodes::_ineg : negate_op(intType ); break; |
2448 case Bytecodes::_ineg : negate_op(intType ); break; |
2445 case Bytecodes::_lneg : negate_op(longType ); break; |
2449 case Bytecodes::_lneg : negate_op(longType ); break; |
2446 case Bytecodes::_fneg : negate_op(floatType ); break; |
2450 case Bytecodes::_fneg : negate_op(floatType ); break; |
2513 case Bytecodes::_invokedynamic : // fall through |
2517 case Bytecodes::_invokedynamic : // fall through |
2514 case Bytecodes::_invokeinterface: invoke(code); break; |
2518 case Bytecodes::_invokeinterface: invoke(code); break; |
2515 case Bytecodes::_new : new_instance(s.get_index_u2()); break; |
2519 case Bytecodes::_new : new_instance(s.get_index_u2()); break; |
2516 case Bytecodes::_newarray : new_type_array(); break; |
2520 case Bytecodes::_newarray : new_type_array(); break; |
2517 case Bytecodes::_anewarray : new_object_array(); break; |
2521 case Bytecodes::_anewarray : new_object_array(); break; |
2518 case Bytecodes::_arraylength : ipush(append(new ArrayLength(apop(), lock_stack()))); break; |
2522 case Bytecodes::_arraylength : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; } |
2519 case Bytecodes::_athrow : throw_op(s.cur_bci()); break; |
2523 case Bytecodes::_athrow : throw_op(s.cur_bci()); break; |
2520 case Bytecodes::_checkcast : check_cast(s.get_index_u2()); break; |
2524 case Bytecodes::_checkcast : check_cast(s.get_index_u2()); break; |
2521 case Bytecodes::_instanceof : instance_of(s.get_index_u2()); break; |
2525 case Bytecodes::_instanceof : instance_of(s.get_index_u2()); break; |
2522 // Note: we do not have special handling for the monitorenter bytecode if DeoptC1 && DeoptOnAsyncException |
|
2523 case Bytecodes::_monitorenter : monitorenter(apop(), s.cur_bci()); break; |
2526 case Bytecodes::_monitorenter : monitorenter(apop(), s.cur_bci()); break; |
2524 case Bytecodes::_monitorexit : monitorexit (apop(), s.cur_bci()); break; |
2527 case Bytecodes::_monitorexit : monitorexit (apop(), s.cur_bci()); break; |
2525 case Bytecodes::_wide : ShouldNotReachHere(); break; |
2528 case Bytecodes::_wide : ShouldNotReachHere(); break; |
2526 case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break; |
2529 case Bytecodes::_multianewarray : new_multi_array(s.cur_bcp()[3]); break; |
2527 case Bytecodes::_ifnull : if_null(objectType, If::eql); break; |
2530 case Bytecodes::_ifnull : if_null(objectType, If::eql); break; |
2544 // if there are any, check if last instruction is a BlockEnd instruction |
2547 // if there are any, check if last instruction is a BlockEnd instruction |
2545 BlockEnd* end = last()->as_BlockEnd(); |
2548 BlockEnd* end = last()->as_BlockEnd(); |
2546 if (end == NULL) { |
2549 if (end == NULL) { |
2547 // all blocks must end with a BlockEnd instruction => add a Goto |
2550 // all blocks must end with a BlockEnd instruction => add a Goto |
2548 end = new Goto(block_at(s.cur_bci()), false); |
2551 end = new Goto(block_at(s.cur_bci()), false); |
2549 _last = _last->set_next(end, prev_bci); |
2552 append(end); |
2550 } |
2553 } |
2551 assert(end == last()->as_BlockEnd(), "inconsistency"); |
2554 assert(end == last()->as_BlockEnd(), "inconsistency"); |
2552 |
2555 |
2553 // if the method terminates, we don't need the stack anymore |
2556 assert(end->state() != NULL, "state must already be present"); |
2554 if (end->as_Return() != NULL) { |
2557 assert(end->as_Return() == NULL || end->as_Throw() == NULL || end->state()->stack_size() == 0, "stack not needed for return and throw"); |
2555 state()->clear_stack(); |
|
2556 } else if (end->as_Throw() != NULL) { |
|
2557 // May have exception handler in caller scopes |
|
2558 state()->truncate_stack(scope()->lock_stack_size()); |
|
2559 } |
|
2560 |
2558 |
2561 // connect to begin & set state |
2559 // connect to begin & set state |
2562 // NOTE that inlining may have changed the block we are parsing |
2560 // NOTE that inlining may have changed the block we are parsing |
2563 block()->set_end(end); |
2561 block()->set_end(end); |
2564 end->set_state(state()); |
|
2565 // propagate state |
2562 // propagate state |
2566 for (int i = end->number_of_sux() - 1; i >= 0; i--) { |
2563 for (int i = end->number_of_sux() - 1; i >= 0; i--) { |
2567 BlockBegin* sux = end->sux_at(i); |
2564 BlockBegin* sux = end->sux_at(i); |
2568 assert(sux->is_predecessor(block()), "predecessor missing"); |
2565 assert(sux->is_predecessor(block()), "predecessor missing"); |
2569 // be careful, bailout if bytecodes are strange |
2566 // be careful, bailout if bytecodes are strange |
2570 if (!sux->try_merge(state())) BAILOUT_("block join failed", NULL); |
2567 if (!sux->try_merge(end->state())) BAILOUT_("block join failed", NULL); |
2571 scope_data()->add_to_work_list(end->sux_at(i)); |
2568 scope_data()->add_to_work_list(end->sux_at(i)); |
2572 } |
2569 } |
2573 |
2570 |
2574 scope_data()->set_stream(NULL); |
2571 scope_data()->set_stream(NULL); |
2575 |
2572 |
2655 , Bytecodes::_instanceof |
2651 , Bytecodes::_instanceof |
2656 , Bytecodes::_monitorenter |
2652 , Bytecodes::_monitorenter |
2657 , Bytecodes::_multianewarray |
2653 , Bytecodes::_multianewarray |
2658 }; |
2654 }; |
2659 |
2655 |
2660 // the following bytecodes are assumed to potentially |
|
2661 // throw asynchronous exceptions in compiled code due |
|
2662 // to safepoints (note: these entries could be merged |
|
2663 // with the can_trap_list - however, we need to know |
|
2664 // which ones are asynchronous for now - see also the |
|
2665 // comment in GraphBuilder::handle_exception) |
|
2666 Bytecodes::Code is_async_list[] = |
|
2667 { Bytecodes::_ifeq |
|
2668 , Bytecodes::_ifne |
|
2669 , Bytecodes::_iflt |
|
2670 , Bytecodes::_ifge |
|
2671 , Bytecodes::_ifgt |
|
2672 , Bytecodes::_ifle |
|
2673 , Bytecodes::_if_icmpeq |
|
2674 , Bytecodes::_if_icmpne |
|
2675 , Bytecodes::_if_icmplt |
|
2676 , Bytecodes::_if_icmpge |
|
2677 , Bytecodes::_if_icmpgt |
|
2678 , Bytecodes::_if_icmple |
|
2679 , Bytecodes::_if_acmpeq |
|
2680 , Bytecodes::_if_acmpne |
|
2681 , Bytecodes::_goto |
|
2682 , Bytecodes::_jsr |
|
2683 , Bytecodes::_ret |
|
2684 , Bytecodes::_tableswitch |
|
2685 , Bytecodes::_lookupswitch |
|
2686 , Bytecodes::_ireturn |
|
2687 , Bytecodes::_lreturn |
|
2688 , Bytecodes::_freturn |
|
2689 , Bytecodes::_dreturn |
|
2690 , Bytecodes::_areturn |
|
2691 , Bytecodes::_return |
|
2692 , Bytecodes::_ifnull |
|
2693 , Bytecodes::_ifnonnull |
|
2694 , Bytecodes::_goto_w |
|
2695 , Bytecodes::_jsr_w |
|
2696 }; |
|
2697 |
|
2698 // inititialize trap tables |
2656 // inititialize trap tables |
2699 for (int i = 0; i < Bytecodes::number_of_java_codes; i++) { |
2657 for (int i = 0; i < Bytecodes::number_of_java_codes; i++) { |
2700 _can_trap[i] = false; |
2658 _can_trap[i] = false; |
2701 _is_async[i] = false; |
|
2702 } |
2659 } |
2703 // set standard trap info |
2660 // set standard trap info |
2704 for (uint j = 0; j < ARRAY_SIZE(can_trap_list); j++) { |
2661 for (uint j = 0; j < ARRAY_SIZE(can_trap_list); j++) { |
2705 _can_trap[can_trap_list[j]] = true; |
2662 _can_trap[can_trap_list[j]] = true; |
2706 } |
|
2707 |
|
2708 // We now deoptimize if an asynchronous exception is thrown. This |
|
2709 // considerably cleans up corner case issues related to javac's |
|
2710 // incorrect exception handler ranges for async exceptions and |
|
2711 // allows us to precisely analyze the types of exceptions from |
|
2712 // certain bytecodes. |
|
2713 if (!(DeoptC1 && DeoptOnAsyncException)) { |
|
2714 // set asynchronous trap info |
|
2715 for (uint k = 0; k < ARRAY_SIZE(is_async_list); k++) { |
|
2716 assert(!_can_trap[is_async_list[k]], "can_trap_list and is_async_list should be disjoint"); |
|
2717 _can_trap[is_async_list[k]] = true; |
|
2718 _is_async[is_async_list[k]] = true; |
|
2719 } |
|
2720 } |
2663 } |
2721 } |
2664 } |
2722 |
2665 |
2723 |
2666 |
2724 BlockBegin* GraphBuilder::header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state) { |
2667 BlockBegin* GraphBuilder::header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state) { |
2766 osr_entry |
2709 osr_entry |
2767 ); |
2710 ); |
2768 start->set_next(base, 0); |
2711 start->set_next(base, 0); |
2769 start->set_end(base); |
2712 start->set_end(base); |
2770 // create & setup state for start block |
2713 // create & setup state for start block |
2771 start->set_state(state->copy()); |
2714 start->set_state(state->copy(ValueStack::StateAfter, std_entry->bci())); |
2772 base->set_state(state->copy()); |
2715 base->set_state(state->copy(ValueStack::StateAfter, std_entry->bci())); |
2773 |
2716 |
2774 if (base->std_entry()->state() == NULL) { |
2717 if (base->std_entry()->state() == NULL) { |
2775 // setup states for header blocks |
2718 // setup states for header blocks |
2776 base->std_entry()->merge(state); |
2719 base->std_entry()->merge(state); |
2777 } |
2720 } |
2850 // the storage for the OSR buffer is freed manually in the LIRGenerator. |
2794 // the storage for the OSR buffer is freed manually in the LIRGenerator. |
2851 |
2795 |
2852 assert(state->caller_state() == NULL, "should be top scope"); |
2796 assert(state->caller_state() == NULL, "should be top scope"); |
2853 state->clear_locals(); |
2797 state->clear_locals(); |
2854 Goto* g = new Goto(target, false); |
2798 Goto* g = new Goto(target, false); |
2855 g->set_state(_state->copy()); |
|
2856 append(g); |
2799 append(g); |
2857 _osr_entry->set_end(g); |
2800 _osr_entry->set_end(g); |
2858 target->merge(_osr_entry->end()->state()); |
2801 target->merge(_osr_entry->end()->state()); |
2859 |
2802 |
2860 scope_data()->set_stream(NULL); |
2803 scope_data()->set_stream(NULL); |
2861 } |
2804 } |
2862 |
2805 |
2863 |
2806 |
2864 ValueStack* GraphBuilder::state_at_entry() { |
2807 ValueStack* GraphBuilder::state_at_entry() { |
2865 ValueStack* state = new ValueStack(scope(), method()->max_locals(), method()->max_stack()); |
2808 ValueStack* state = new ValueStack(scope(), NULL); |
2866 |
2809 |
2867 // Set up locals for receiver |
2810 // Set up locals for receiver |
2868 int idx = 0; |
2811 int idx = 0; |
2869 if (!method()->is_static()) { |
2812 if (!method()->is_static()) { |
2870 // we should always see the receiver |
2813 // we should always see the receiver |
2884 idx += type->size(); |
2827 idx += type->size(); |
2885 } |
2828 } |
2886 |
2829 |
2887 // lock synchronized method |
2830 // lock synchronized method |
2888 if (method()->is_synchronized()) { |
2831 if (method()->is_synchronized()) { |
2889 state->lock(scope(), NULL); |
2832 state->lock(NULL); |
2890 } |
2833 } |
2891 |
2834 |
2892 return state; |
2835 return state; |
2893 } |
2836 } |
2894 |
2837 |
2895 |
2838 |
2896 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope) |
2839 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope) |
2897 : _scope_data(NULL) |
2840 : _scope_data(NULL) |
2898 , _exception_state(NULL) |
|
2899 , _instruction_count(0) |
2841 , _instruction_count(0) |
2900 , _osr_entry(NULL) |
2842 , _osr_entry(NULL) |
2901 , _memory(new MemoryBuffer()) |
2843 , _memory(new MemoryBuffer()) |
2902 , _compilation(compilation) |
2844 , _compilation(compilation) |
2903 , _inline_bailout_msg(NULL) |
2845 , _inline_bailout_msg(NULL) |
2917 _initial_state = state_at_entry(); |
2859 _initial_state = state_at_entry(); |
2918 start_block->merge(_initial_state); |
2860 start_block->merge(_initial_state); |
2919 |
2861 |
2920 // complete graph |
2862 // complete graph |
2921 _vmap = new ValueMap(); |
2863 _vmap = new ValueMap(); |
2922 scope->compute_lock_stack_size(); |
|
2923 switch (scope->method()->intrinsic_id()) { |
2864 switch (scope->method()->intrinsic_id()) { |
2924 case vmIntrinsics::_dabs : // fall through |
2865 case vmIntrinsics::_dabs : // fall through |
2925 case vmIntrinsics::_dsqrt : // fall through |
2866 case vmIntrinsics::_dsqrt : // fall through |
2926 case vmIntrinsics::_dsin : // fall through |
2867 case vmIntrinsics::_dsin : // fall through |
2927 case vmIntrinsics::_dcos : // fall through |
2868 case vmIntrinsics::_dcos : // fall through |
2943 scope_data()->set_stream(&s); |
2884 scope_data()->set_stream(&s); |
2944 s.next(); |
2885 s.next(); |
2945 |
2886 |
2946 // setup the initial block state |
2887 // setup the initial block state |
2947 _block = start_block; |
2888 _block = start_block; |
2948 _state = start_block->state()->copy(); |
2889 _state = start_block->state()->copy_for_parsing(); |
2949 _last = start_block; |
2890 _last = start_block; |
2950 load_local(doubleType, 0); |
2891 load_local(doubleType, 0); |
2951 |
2892 |
2952 // Emit the intrinsic node. |
2893 // Emit the intrinsic node. |
2953 bool result = try_inline_intrinsics(scope->method()); |
2894 bool result = try_inline_intrinsics(scope->method()); |
2986 if (PrintCompilation && Verbose) tty->print_cr("Created %d Instructions", _instruction_count); |
2926 if (PrintCompilation && Verbose) tty->print_cr("Created %d Instructions", _instruction_count); |
2987 #endif |
2927 #endif |
2988 } |
2928 } |
2989 |
2929 |
2990 |
2930 |
2991 ValueStack* GraphBuilder::lock_stack() { |
2931 ValueStack* GraphBuilder::copy_state_before() { |
2992 // return a new ValueStack representing just the current lock stack |
2932 return copy_state_before_with_bci(bci()); |
2993 // (for debug info at safepoints in exception throwing or handling) |
2933 } |
2994 ValueStack* new_stack = state()->copy_locks(); |
2934 |
2995 return new_stack; |
2935 ValueStack* GraphBuilder::copy_state_exhandling() { |
2996 } |
2936 return copy_state_exhandling_with_bci(bci()); |
2997 |
2937 } |
|
2938 |
|
2939 ValueStack* GraphBuilder::copy_state_for_exception() { |
|
2940 return copy_state_for_exception_with_bci(bci()); |
|
2941 } |
|
2942 |
|
2943 ValueStack* GraphBuilder::copy_state_before_with_bci(int bci) { |
|
2944 return state()->copy(ValueStack::StateBefore, bci); |
|
2945 } |
|
2946 |
|
2947 ValueStack* GraphBuilder::copy_state_exhandling_with_bci(int bci) { |
|
2948 if (!has_handler()) return NULL; |
|
2949 return state()->copy(ValueStack::StateBefore, bci); |
|
2950 } |
|
2951 |
|
2952 ValueStack* GraphBuilder::copy_state_for_exception_with_bci(int bci) { |
|
2953 ValueStack* s = copy_state_exhandling_with_bci(bci); |
|
2954 if (s == NULL) { |
|
2955 if (_compilation->env()->jvmti_can_access_local_variables()) { |
|
2956 s = state()->copy(ValueStack::ExceptionState, bci); |
|
2957 } else { |
|
2958 s = state()->copy(ValueStack::EmptyExceptionState, bci); |
|
2959 } |
|
2960 } |
|
2961 return s; |
|
2962 } |
2998 |
2963 |
2999 int GraphBuilder::recursive_inline_level(ciMethod* cur_callee) const { |
2964 int GraphBuilder::recursive_inline_level(ciMethod* cur_callee) const { |
3000 int recur_level = 0; |
2965 int recur_level = 0; |
3001 for (IRScope* s = scope(); s != NULL; s = s->caller()) { |
2966 for (IRScope* s = scope(); s != NULL; s = s->caller()) { |
3002 if (s->method() == cur_callee) { |
2967 if (s->method() == cur_callee) { |
3175 default : return false; // do not inline |
3140 default : return false; // do not inline |
3176 } |
3141 } |
3177 // create intrinsic node |
3142 // create intrinsic node |
3178 const bool has_receiver = !callee->is_static(); |
3143 const bool has_receiver = !callee->is_static(); |
3179 ValueType* result_type = as_ValueType(callee->return_type()); |
3144 ValueType* result_type = as_ValueType(callee->return_type()); |
|
3145 ValueStack* state_before = copy_state_for_exception(); |
3180 |
3146 |
3181 Values* args = state()->pop_arguments(callee->arg_size()); |
3147 Values* args = state()->pop_arguments(callee->arg_size()); |
3182 ValueStack* locks = lock_stack(); |
|
3183 |
3148 |
3184 if (is_profiling()) { |
3149 if (is_profiling()) { |
3185 // Don't profile in the special case where the root method |
3150 // Don't profile in the special case where the root method |
3186 // is the intrinsic |
3151 // is the intrinsic |
3187 if (callee != method()) { |
3152 if (callee != method()) { |
3196 profile_call(recv, NULL); |
3161 profile_call(recv, NULL); |
3197 } |
3162 } |
3198 } |
3163 } |
3199 } |
3164 } |
3200 |
3165 |
3201 Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, lock_stack(), |
3166 Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before, |
3202 preserves_state, cantrap); |
3167 preserves_state, cantrap); |
3203 // append instruction & push result |
3168 // append instruction & push result |
3204 Value value = append_split(result); |
3169 Value value = append_split(result); |
3205 if (result_type != voidType) push(result_type, value); |
3170 if (result_type != voidType) push(result_type, value); |
3206 |
3171 |
3234 |
3199 |
3235 BlockBegin* jsr_start_block = block_at(jsr_dest_bci); |
3200 BlockBegin* jsr_start_block = block_at(jsr_dest_bci); |
3236 assert(jsr_start_block != NULL, "jsr start block must exist"); |
3201 assert(jsr_start_block != NULL, "jsr start block must exist"); |
3237 assert(!jsr_start_block->is_set(BlockBegin::was_visited_flag), "should not have visited jsr yet"); |
3202 assert(!jsr_start_block->is_set(BlockBegin::was_visited_flag), "should not have visited jsr yet"); |
3238 Goto* goto_sub = new Goto(jsr_start_block, false); |
3203 Goto* goto_sub = new Goto(jsr_start_block, false); |
3239 goto_sub->set_state(state()); |
|
3240 // Must copy state to avoid wrong sharing when parsing bytecodes |
3204 // Must copy state to avoid wrong sharing when parsing bytecodes |
3241 assert(jsr_start_block->state() == NULL, "should have fresh jsr starting block"); |
3205 assert(jsr_start_block->state() == NULL, "should have fresh jsr starting block"); |
3242 jsr_start_block->set_state(state()->copy()); |
3206 jsr_start_block->set_state(copy_state_before_with_bci(jsr_dest_bci)); |
3243 append(goto_sub); |
3207 append(goto_sub); |
3244 _block->set_end(goto_sub); |
3208 _block->set_end(goto_sub); |
3245 _last = _block = jsr_start_block; |
3209 _last = _block = jsr_start_block; |
3246 |
3210 |
3247 // Clear out bytecode stream |
3211 // Clear out bytecode stream |
3288 // guaranteed to be non-null by the explicit null check at the |
3252 // guaranteed to be non-null by the explicit null check at the |
3289 // beginning of inlining. |
3253 // beginning of inlining. |
3290 void GraphBuilder::inline_sync_entry(Value lock, BlockBegin* sync_handler) { |
3254 void GraphBuilder::inline_sync_entry(Value lock, BlockBegin* sync_handler) { |
3291 assert(lock != NULL && sync_handler != NULL, "lock or handler missing"); |
3255 assert(lock != NULL && sync_handler != NULL, "lock or handler missing"); |
3292 |
3256 |
3293 set_exception_state(state()->copy()); |
|
3294 monitorenter(lock, SynchronizationEntryBCI); |
3257 monitorenter(lock, SynchronizationEntryBCI); |
3295 assert(_last->as_MonitorEnter() != NULL, "monitor enter expected"); |
3258 assert(_last->as_MonitorEnter() != NULL, "monitor enter expected"); |
3296 _last->set_needs_null_check(false); |
3259 _last->set_needs_null_check(false); |
3297 |
3260 |
3298 sync_handler->set(BlockBegin::exception_entry_flag); |
3261 sync_handler->set(BlockBegin::exception_entry_flag); |
3330 assert(exception->is_pinned(), "must be"); |
3293 assert(exception->is_pinned(), "must be"); |
3331 |
3294 |
3332 int bci = SynchronizationEntryBCI; |
3295 int bci = SynchronizationEntryBCI; |
3333 if (lock) { |
3296 if (lock) { |
3334 assert(state()->locks_size() > 0 && state()->lock_at(state()->locks_size() - 1) == lock, "lock is missing"); |
3297 assert(state()->locks_size() > 0 && state()->lock_at(state()->locks_size() - 1) == lock, "lock is missing"); |
3335 if (lock->bci() == -99) { |
3298 if (!lock->is_linked()) { |
3336 lock = append_with_bci(lock, -1); |
3299 lock = append_with_bci(lock, -1); |
3337 } |
3300 } |
3338 |
3301 |
3339 // exit the monitor in the context of the synchronized method |
3302 // exit the monitor in the context of the synchronized method |
3340 monitorexit(lock, SynchronizationEntryBCI); |
3303 monitorexit(lock, SynchronizationEntryBCI); |
3341 |
3304 |
3342 // exit the context of the synchronized method |
3305 // exit the context of the synchronized method |
3343 if (!default_handler) { |
3306 if (!default_handler) { |
3344 pop_scope(); |
3307 pop_scope(); |
3345 _state = _state->copy(); |
3308 bci = _state->caller_state()->bci(); |
3346 bci = _state->scope()->caller_bci(); |
3309 _state = _state->caller_state()->copy_for_parsing(); |
3347 _state = _state->pop_scope()->copy(); |
|
3348 } |
3310 } |
3349 } |
3311 } |
3350 |
3312 |
3351 // perform the throw as if at the call site |
3313 // perform the throw as if at the call site |
3352 apush(exception); |
3314 apush(exception); |
3353 |
|
3354 set_exception_state(state()->copy()); |
|
3355 throw_op(bci); |
3315 throw_op(bci); |
3356 |
3316 |
3357 BlockEnd* end = last()->as_BlockEnd(); |
3317 BlockEnd* end = last()->as_BlockEnd(); |
3358 block()->set_end(end); |
3318 block()->set_end(end); |
3359 end->set_state(state()); |
|
3360 |
3319 |
3361 _block = orig_block; |
3320 _block = orig_block; |
3362 _state = orig_state; |
3321 _state = orig_state; |
3363 _last = orig_last; |
3322 _last = orig_last; |
3364 } |
3323 } |
3485 scope_data()->set_stream(scope_data()->parent()->stream()); |
3444 scope_data()->set_stream(scope_data()->parent()->stream()); |
3486 |
3445 |
3487 // Pass parameters into callee state: add assignments |
3446 // Pass parameters into callee state: add assignments |
3488 // note: this will also ensure that all arguments are computed before being passed |
3447 // note: this will also ensure that all arguments are computed before being passed |
3489 ValueStack* callee_state = state(); |
3448 ValueStack* callee_state = state(); |
3490 ValueStack* caller_state = scope()->caller_state(); |
3449 ValueStack* caller_state = state()->caller_state(); |
3491 { int i = args_base; |
3450 { int i = args_base; |
3492 while (i < caller_state->stack_size()) { |
3451 while (i < caller_state->stack_size()) { |
3493 const int par_no = i - args_base; |
3452 const int par_no = i - args_base; |
3494 Value arg = caller_state->stack_at_inc(i); |
3453 Value arg = caller_state->stack_at_inc(i); |
3495 // NOTE: take base() of arg->type() to avoid problems storing |
3454 // NOTE: take base() of arg->type() to avoid problems storing |
3500 |
3459 |
3501 // Remove args from stack. |
3460 // Remove args from stack. |
3502 // Note that we preserve locals state in case we can use it later |
3461 // Note that we preserve locals state in case we can use it later |
3503 // (see use of pop_scope() below) |
3462 // (see use of pop_scope() below) |
3504 caller_state->truncate_stack(args_base); |
3463 caller_state->truncate_stack(args_base); |
3505 callee_state->truncate_stack(args_base); |
3464 assert(callee_state->stack_size() == 0, "callee stack must be empty"); |
3506 |
|
3507 // Setup state that is used at returns from the inlined method. |
|
3508 // This is essentially the state of the continuation block, |
|
3509 // but without the return value on stack, if any, this will |
|
3510 // be pushed at the return instruction (see method_return). |
|
3511 scope_data()->set_continuation_state(caller_state->copy()); |
|
3512 |
|
3513 // Compute lock stack size for callee scope now that args have been passed |
|
3514 scope()->compute_lock_stack_size(); |
|
3515 |
3465 |
3516 Value lock; |
3466 Value lock; |
3517 BlockBegin* sync_handler; |
3467 BlockBegin* sync_handler; |
3518 |
3468 |
3519 // Inline the locking of the receiver if the callee is synchronized |
3469 // Inline the locking of the receiver if the callee is synchronized |
3520 if (callee->is_synchronized()) { |
3470 if (callee->is_synchronized()) { |
3521 lock = callee->is_static() ? append(new Constant(new InstanceConstant(callee->holder()->java_mirror()))) |
3471 lock = callee->is_static() ? append(new Constant(new InstanceConstant(callee->holder()->java_mirror()))) |
3522 : state()->local_at(0); |
3472 : state()->local_at(0); |
3523 sync_handler = new BlockBegin(-1); |
3473 sync_handler = new BlockBegin(SynchronizationEntryBCI); |
3524 inline_sync_entry(lock, sync_handler); |
3474 inline_sync_entry(lock, sync_handler); |
3525 |
|
3526 // recompute the lock stack size |
|
3527 scope()->compute_lock_stack_size(); |
|
3528 } |
3475 } |
3529 |
3476 |
3530 |
3477 |
3531 BlockBegin* callee_start_block = block_at(0); |
3478 BlockBegin* callee_start_block = block_at(0); |
3532 if (callee_start_block != NULL) { |
3479 if (callee_start_block != NULL) { |
3533 assert(callee_start_block->is_set(BlockBegin::parser_loop_header_flag), "must be loop header"); |
3480 assert(callee_start_block->is_set(BlockBegin::parser_loop_header_flag), "must be loop header"); |
3534 Goto* goto_callee = new Goto(callee_start_block, false); |
3481 Goto* goto_callee = new Goto(callee_start_block, false); |
3535 goto_callee->set_state(state()); |
|
3536 // The state for this goto is in the scope of the callee, so use |
3482 // The state for this goto is in the scope of the callee, so use |
3537 // the entry bci for the callee instead of the call site bci. |
3483 // the entry bci for the callee instead of the call site bci. |
3538 append_with_bci(goto_callee, 0); |
3484 append_with_bci(goto_callee, 0); |
3539 _block->set_end(goto_callee); |
3485 _block->set_end(goto_callee); |
3540 callee_start_block->merge(callee_state); |
3486 callee_start_block->merge(callee_state); |
3577 // is currently essential to making inlining profitable. |
3523 // is currently essential to making inlining profitable. |
3578 if ( num_returns() == 1 |
3524 if ( num_returns() == 1 |
3579 && block() == orig_block |
3525 && block() == orig_block |
3580 && block() == inline_cleanup_block()) { |
3526 && block() == inline_cleanup_block()) { |
3581 _last = inline_cleanup_return_prev(); |
3527 _last = inline_cleanup_return_prev(); |
3582 _state = inline_cleanup_state()->pop_scope(); |
3528 _state = inline_cleanup_state(); |
3583 } else if (continuation_preds == cont->number_of_preds()) { |
3529 } else if (continuation_preds == cont->number_of_preds()) { |
3584 // Inlining caused that the instructions after the invoke in the |
3530 // Inlining caused that the instructions after the invoke in the |
3585 // caller are not reachable any more. So skip filling this block |
3531 // caller are not reachable any more. So skip filling this block |
3586 // with instructions! |
3532 // with instructions! |
3587 assert (cont == continuation(), ""); |
3533 assert (cont == continuation(), ""); |
3643 // this scope can be inlined directly into the caller so remove |
3589 // this scope can be inlined directly into the caller so remove |
3644 // the block at bci 0. |
3590 // the block at bci 0. |
3645 blb.bci2block()->at_put(0, NULL); |
3591 blb.bci2block()->at_put(0, NULL); |
3646 } |
3592 } |
3647 |
3593 |
3648 callee_scope->set_caller_state(state()); |
3594 set_state(new ValueStack(callee_scope, state()->copy(ValueStack::CallerState, bci()))); |
3649 set_state(state()->push_scope(callee_scope)); |
|
3650 |
3595 |
3651 ScopeData* data = new ScopeData(scope_data()); |
3596 ScopeData* data = new ScopeData(scope_data()); |
3652 data->set_scope(callee_scope); |
3597 data->set_scope(callee_scope); |
3653 data->set_bci2block(blb.bci2block()); |
3598 data->set_bci2block(blb.bci2block()); |
3654 data->set_continuation(continuation); |
3599 data->set_continuation(continuation); |
3794 |
3736 |
3795 // An unsafe CAS can alias with other field accesses, but we don't |
3737 // An unsafe CAS can alias with other field accesses, but we don't |
3796 // know which ones so mark the state as not preserved. This will |
3738 // know which ones so mark the state as not preserved. This will |
3797 // cause CSE to invalidate memory across it. |
3739 // cause CSE to invalidate memory across it. |
3798 bool preserves_state = false; |
3740 bool preserves_state = false; |
3799 Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(), args, false, lock_stack(), preserves_state); |
3741 Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(), args, false, state_before, preserves_state); |
3800 append_split(result); |
3742 append_split(result); |
3801 push(result_type, result); |
3743 push(result_type, result); |
3802 compilation()->set_has_unsafe_access(true); |
3744 compilation()->set_has_unsafe_access(true); |
3803 } |
3745 } |
3804 |
3746 |