src/share/vm/c1/c1_LIR.cpp

changeset 8865:ffcdff41a92f
parent    8856:ac27a9c85bea
child     9126:bc5b8e3dcb6b

comparing 8864:e4aeef458496 with 8865:ffcdff41a92f
@@ -390,12 +390,11 @@
   _block(block),
   _ublock(ublock),
   _stub(NULL) {
 }

 #endif
-
 void LIR_OpBranch::change_block(BlockBegin* b) {
   assert(_block != NULL, "must have old block");
   assert(_block->label() == label(), "must be equal");

   _block = b;
@@ -1340,28 +1339,21 @@
     patch_code,
     info, lir_move_volatile));
 }

 void LIR_List::volatile_load_unsafe_reg(LIR_Opr base, LIR_Opr offset, LIR_Opr dst, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code) {
-#ifndef MIPS64
+#ifdef MIPS64
+  add(base, offset, base);
+  offset = 0;
+#endif
   append(new LIR_Op1(
             lir_move,
             LIR_OprFact::address(new LIR_Address(base, offset, type)),
             dst,
             type,
             patch_code,
             info, lir_move_volatile));
-#else
-  add(base, offset, base);
-  append(new LIR_Op1(
-            lir_move,
-            LIR_OprFact::address(new LIR_Address(base, 0, type)),
-            dst,
-            type,
-            patch_code,
-            info, lir_move_volatile));
-#endif
 }


 void LIR_List::prefetch(LIR_Address* addr, bool is_store) {
   append(new LIR_Op1(
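Note on volatile_load_unsafe_reg above: the old code duplicated the entire append() under #ifndef MIPS64 / #else; the new code keeps a single append() and, on MIPS64 only, first folds the index register into the base with add(base, offset, base) and clears the offset. The likely motivation is that MIPS loads and stores only address memory as a base register plus an immediate displacement, with no base-plus-index form, so the sum has to be materialized in a register before the access. A minimal standalone sketch of that addressing decision (toy types and names, not HotSpot code):

    #include <cstdio>
    #include <string>

    // Toy stand-in for a base+index memory operand (illustration only).
    struct Addr { std::string base; std::string index; };

    // If the target has no reg+reg addressing, fold the index into the base
    // first and address with a zero index, mirroring the MIPS64 path above.
    Addr form_address(std::string base, const std::string& index, bool has_reg_plus_reg) {
      if (!has_reg_plus_reg) {
        std::printf("  add  %s, %s, %s\n", base.c_str(), base.c_str(), index.c_str());
        return Addr{base, "0"};
      }
      return Addr{base, index};
    }

    int main() {
      std::puts("target with reg+reg addressing:");
      Addr a = form_address("Rbase", "Roff", true);
      std::printf("  load dst, [%s + %s]\n", a.base.c_str(), a.index.c_str());

      std::puts("MIPS64-style target (base + immediate displacement only):");
      Addr b = form_address("Rbase", "Roff", false);
      std::printf("  load dst, [%s + %s]\n", b.base.c_str(), b.index.c_str());
      return 0;
    }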
@@ -1413,30 +1405,21 @@
     info,
     lir_move_volatile));
 }

 void LIR_List::volatile_store_unsafe_reg(LIR_Opr src, LIR_Opr base, LIR_Opr offset, BasicType type, CodeEmitInfo* info, LIR_PatchCode patch_code) {
-#ifndef MIPS64
+#ifdef MIPS64
+  add(base, offset, base);
+  offset = 0;
+#endif
   append(new LIR_Op1(
             lir_move,
             src,
             LIR_OprFact::address(new LIR_Address(base, offset, type)),
             type,
             patch_code,
             info, lir_move_volatile));
-#else
-  add(base, offset, base);
-  append(new LIR_Op1(
-            lir_move,
-            src,
-            LIR_OprFact::address(new LIR_Address(base, 0, type)),
-            type,
-            patch_code,
-            info, lir_move_volatile));
-
-#endif
-
 }

 #ifdef MIPS64
 void LIR_List::frem(LIR_Opr left, LIR_Opr right, LIR_Opr res, LIR_Opr tmp, CodeEmitInfo* info) {
   append(new LIR_Op3(
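volatile_store_unsafe_reg above receives the identical simplification: the duplicated append() under #else is dropped and the shared path is reached after the same add(base, offset, base) / offset = 0 preamble, so only MIPS64 pays for the extra addition and the LIR emitted for other targets is unchanged (in the stock c1_LIR.hpp, add() appears to be a thin wrapper that appends a lir_add op).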
@@ -1501,31 +1484,38 @@
     LIR_OprFact::address(new LIR_Address(base, disp, T_INT)),
     LIR_OprFact::intConst(c),
     info));
 }

-void LIR_List::null_check(LIR_Opr opr, CodeEmitInfo* info, bool deoptimize_on_null) {
-  if (deoptimize_on_null) {
-    // Emit an explicit null check and deoptimize if opr is null
-    CodeStub* deopt = new DeoptimizeStub(info);
-    cmp(lir_cond_equal, opr, LIR_OprFact::oopConst(NULL));
-    branch(lir_cond_equal, T_OBJECT, deopt);
-  } else {
-    // Emit an implicit null check
-    append(new LIR_Op1(lir_null_check, opr, info));
-  }
-}
-
 void LIR_List::cmp_reg_mem(LIR_Condition condition, LIR_Opr reg, LIR_Address* addr, CodeEmitInfo* info) {
   append(new LIR_Op2(
     lir_cmp,
     condition,
     reg,
     LIR_OprFact::address(addr),
     info));
 }
+#endif

+void LIR_List::null_check(LIR_Opr opr, CodeEmitInfo* info, bool deoptimize_on_null) {
+  if (deoptimize_on_null) {
+    // Emit an explicit null check and deoptimize if opr is null
+    CodeStub* deopt = new DeoptimizeStub(info);
+#ifndef MIPS64
+    cmp(lir_cond_equal, opr, LIR_OprFact::oopConst(NULL));
+    branch(lir_cond_equal, T_OBJECT, deopt);
+#else
+    null_check_for_branch(lir_cond_equal, opr, LIR_OprFact::oopConst(NULL));
+    branch(lir_cond_equal, opr, LIR_OprFact::oopConst(NULL), T_OBJECT, deopt);
+#endif
+  } else {
+    // Emit an implicit null check
+    append(new LIR_Op1(lir_null_check, opr, info));
+  }
+}
+
+#ifndef MIPS64
 void LIR_List::allocate_object(LIR_Opr dst, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4,
                                int header_size, int object_size, LIR_Opr klass, bool init_check, CodeStub* stub) {
   append(new LIR_OpAllocObj(
     klass,
     dst,
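Note on the relocated null_check above: it moves out of the target-specific region so both ports share one definition, with only the explicit-deoptimization path split per target. On MIPS there is no condition-code register, so a separate cmp() consumed by a flags-only branch() does not map onto the hardware; the port instead uses null_check_for_branch() together with a branch() overload that carries both compare operands, fusing compare and branch into one LIR operation. A rough standalone sketch of the two emission styles (toy emitter, hypothetical names, not the HotSpot LIR API):

    #include <cstdio>

    // Toy illustration of the two branch styles selected by the
    // #ifndef MIPS64 / #else split above.

    // Style 1: targets with condition codes set flags with a compare, then
    // emit a branch that only reads those flags.
    void emit_flags_style(const char* opr) {
      std::printf("  cmp  %s, 0          ; sets condition codes\n", opr);
      std::printf("  beq  deopt_stub     ; branch consumes the flags\n");
    }

    // Style 2: MIPS-style targets have no flags register, so the branch
    // itself carries both compare operands (compare and branch fused).
    void emit_compare_and_branch_style(const char* opr) {
      std::printf("  beq  %s, zero, deopt_stub\n", opr);
    }

    int main() {
      std::puts("flags-based target:");
      emit_flags_style("Ropr");
      std::puts("MIPS64-style target:");
      emit_compare_and_branch_style("Ropr");
      return 0;
    }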
@@ -1550,11 +1540,11 @@
     t4,
     type,
     stub));
 }
 #else
 void LIR_List::allocate_object(LIR_Opr dst, LIR_Opr t1, LIR_Opr t2, LIR_Opr t3, LIR_Opr t4, LIR_Opr t5, LIR_Opr t6,
                                int header_size, int object_size, LIR_Opr klass, bool init_check, CodeStub* stub) {
   append(new LIR_OpAllocObj(
     klass,
     dst,
     t1,
@@ -1672,20 +1662,22 @@
     c->set_should_profile(true);
   }
   append(c);
 }

+
 void LIR_List::store_check(LIR_Opr object, LIR_Opr array, LIR_Opr tmp1, LIR_Opr tmp2, LIR_Opr tmp3,
                            CodeEmitInfo* info_for_exception, ciMethod* profiled_method, int profiled_bci) {
   LIR_OpTypeCheck* c = new LIR_OpTypeCheck(lir_store_check, object, array, tmp1, tmp2, tmp3, info_for_exception);
   if (profiled_method != NULL) {
     c->set_profiled_method(profiled_method);
     c->set_profiled_bci(profiled_bci);
     c->set_should_profile(true);
   }
   append(c);
 }
+

 #ifndef MIPS64
 void LIR_List::cas_long(LIR_Opr addr, LIR_Opr cmp_value, LIR_Opr new_value,
                         LIR_Opr t1, LIR_Opr t2, LIR_Opr result) {
   append(new LIR_OpCompareAndSwap(lir_cas_long, addr, cmp_value, new_value, t1, t2, result));
