359 } |
359 } |
360 |
360 |
361 |
361 |
// NOTE(review): this file is a side-by-side diff dump — each pair of lines is
// (old version, new version) of the same source line; the leading number is the
// original file's line number and the trailing '|' is the diff column separator.
//
// jobject2reg_with_patching: emits a patchable oop load into `reg`. A NULL
// jobject placeholder is moved via movoop and a PatchingStub records the site
// so the runtime can patch in the resolved constant; patching_epilog wires up
// the stub with the debug info in `info`.
362 void LIR_Assembler::jobject2reg_with_patching(Register reg, CodeEmitInfo* info) { |
362 void LIR_Assembler::jobject2reg_with_patching(Register reg, CodeEmitInfo* info) { |
363 jobject o = NULL; |
363 jobject o = NULL; |
// CHANGED below: old code tagged the stub load_klass_id; the new code uses
// load_mirror_id, since with metadata separated from the Java heap this path
// now loads the class *mirror* oop, while raw Klass* loads moved to the new
// klass2reg_with_patching (which keeps load_klass_id).
364 PatchingStub* patch = new PatchingStub(_masm, PatchingStub::load_klass_id); |
364 PatchingStub* patch = new PatchingStub(_masm, PatchingStub::load_mirror_id); |
365 __ movoop(reg, o); |
365 __ movoop(reg, o); |
366 patching_epilog(patch, lir_patch_normal, reg, info); |
366 patching_epilog(patch, lir_patch_normal, reg, info); |
367 } |
367 } |
368 |
368 |
|
// NOTE(review): diff-dump format — blank '|' lines are the empty old column:
// this function is NEW in the right-hand version (no old-side counterpart).
//
// klass2reg_with_patching: metadata analogue of jobject2reg_with_patching.
// Emits a patchable Klass*/Metadata* load into `reg`: a NULL Metadata*
// placeholder via mov_metadata, tagged load_klass_id so the runtime patches in
// the resolved Klass*; patching_epilog attaches the stub and debug info.
369 void LIR_Assembler::klass2reg_with_patching(Register reg, CodeEmitInfo* info) { |
|
370 Metadata* o = NULL; |
|
371 PatchingStub* patch = new PatchingStub(_masm, PatchingStub::load_klass_id); |
|
372 __ mov_metadata(reg, o); |
|
373 patching_epilog(patch, lir_patch_normal, reg, info); |
|
374 } |
369 |
375 |
370 // This specifies the rsp decrement needed to build the frame |
376 // This specifies the rsp decrement needed to build the frame |
371 int LIR_Assembler::initial_frame_size_in_bytes() { |
377 int LIR_Assembler::initial_frame_size_in_bytes() { |
372 // if rounding, must let FrameMap know! |
378 // if rounding, must let FrameMap know! |
373 |
379 |
446 } |
452 } |
447 |
453 |
448 if (compilation()->env()->dtrace_method_probes()) { |
454 if (compilation()->env()->dtrace_method_probes()) { |
449 __ get_thread(rax); |
455 __ get_thread(rax); |
450 __ movptr(Address(rsp, 0), rax); |
456 __ movptr(Address(rsp, 0), rax); |
451 __ movoop(Address(rsp, sizeof(void*)), method()->constant_encoding()); |
457 __ mov_metadata(Address(rsp, sizeof(void*)), method()->constant_encoding()); |
452 __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit))); |
458 __ call(RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit))); |
453 } |
459 } |
454 |
460 |
455 if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) { |
461 if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) { |
456 __ mov(rax, rsi); // Restore the exception |
462 __ mov(rax, rsi); // Restore the exception |
663 case T_OBJECT: { |
669 case T_OBJECT: { |
664 if (patch_code != lir_patch_none) { |
670 if (patch_code != lir_patch_none) { |
665 jobject2reg_with_patching(dest->as_register(), info); |
671 jobject2reg_with_patching(dest->as_register(), info); |
666 } else { |
672 } else { |
667 __ movoop(dest->as_register(), c->as_jobject()); |
673 __ movoop(dest->as_register(), c->as_jobject()); |
|
674 } |
|
675 break; |
|
676 } |
|
677 |
|
678 case T_METADATA: { |
|
679 if (patch_code != lir_patch_none) { |
|
680 klass2reg_with_patching(dest->as_register(), info); |
|
681 } else { |
|
682 __ mov_metadata(dest->as_register(), c->as_metadata()); |
668 } |
683 } |
669 break; |
684 break; |
670 } |
685 } |
671 |
686 |
672 case T_FLOAT: { |
687 case T_FLOAT: { |
1568 } |
1583 } |
1569 |
1584 |
1570 void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) { |
1585 void LIR_Assembler::emit_alloc_obj(LIR_OpAllocObj* op) { |
1571 if (op->init_check()) { |
1586 if (op->init_check()) { |
1572 __ cmpb(Address(op->klass()->as_register(), |
1587 __ cmpb(Address(op->klass()->as_register(), |
1573 instanceKlass::init_state_offset()), |
1588 InstanceKlass::init_state_offset()), |
1574 instanceKlass::fully_initialized); |
1589 InstanceKlass::fully_initialized); |
1575 add_debug_info_for_null_check_here(op->stub()->info()); |
1590 add_debug_info_for_null_check_here(op->stub()->info()); |
1576 __ jcc(Assembler::notEqual, *op->stub()->entry()); |
1591 __ jcc(Assembler::notEqual, *op->stub()->entry()); |
1577 } |
1592 } |
1578 __ allocate_object(op->obj()->as_register(), |
1593 __ allocate_object(op->obj()->as_register(), |
1579 op->tmp1()->as_register(), |
1594 op->tmp1()->as_register(), |
1685 select_different_registers(obj, dst, k_RInfo, klass_RInfo, Rtmp1); |
1700 select_different_registers(obj, dst, k_RInfo, klass_RInfo, Rtmp1); |
1686 } |
1701 } |
1687 |
1702 |
1688 assert_different_registers(obj, k_RInfo, klass_RInfo); |
1703 assert_different_registers(obj, k_RInfo, klass_RInfo); |
1689 if (!k->is_loaded()) { |
1704 if (!k->is_loaded()) { |
1690 jobject2reg_with_patching(k_RInfo, op->info_for_patch()); |
1705 klass2reg_with_patching(k_RInfo, op->info_for_patch()); |
1691 } else { |
1706 } else { |
1692 #ifdef _LP64 |
1707 #ifdef _LP64 |
1693 __ movoop(k_RInfo, k->constant_encoding()); |
1708 __ mov_metadata(k_RInfo, k->constant_encoding()); |
1694 #endif // _LP64 |
1709 #endif // _LP64 |
1695 } |
1710 } |
1696 assert(obj != k_RInfo, "must be different"); |
1711 assert(obj != k_RInfo, "must be different"); |
1697 |
1712 |
1698 __ cmpptr(obj, (int32_t)NULL_WORD); |
1713 __ cmpptr(obj, (int32_t)NULL_WORD); |
1699 if (op->should_profile()) { |
1714 if (op->should_profile()) { |
1700 Label not_null; |
1715 Label not_null; |
1701 __ jccb(Assembler::notEqual, not_null); |
1716 __ jccb(Assembler::notEqual, not_null); |
1702 // Object is null; update MDO and exit |
1717 // Object is null; update MDO and exit |
1703 Register mdo = klass_RInfo; |
1718 Register mdo = klass_RInfo; |
1704 __ movoop(mdo, md->constant_encoding()); |
1719 __ mov_metadata(mdo, md->constant_encoding()); |
1705 Address data_addr(mdo, md->byte_offset_of_slot(data, DataLayout::header_offset())); |
1720 Address data_addr(mdo, md->byte_offset_of_slot(data, DataLayout::header_offset())); |
1706 int header_bits = DataLayout::flag_mask_to_header_mask(BitData::null_seen_byte_constant()); |
1721 int header_bits = DataLayout::flag_mask_to_header_mask(BitData::null_seen_byte_constant()); |
1707 __ orl(data_addr, header_bits); |
1722 __ orl(data_addr, header_bits); |
1708 __ jmp(*obj_is_null); |
1723 __ jmp(*obj_is_null); |
1709 __ bind(not_null); |
1724 __ bind(not_null); |
1714 |
1729 |
1715 if (op->fast_check()) { |
1730 if (op->fast_check()) { |
1716 // get object class |
1731 // get object class |
1717 // not a safepoint as obj null check happens earlier |
1732 // not a safepoint as obj null check happens earlier |
1718 #ifdef _LP64 |
1733 #ifdef _LP64 |
1719 if (UseCompressedOops) { |
1734 if (UseCompressedKlassPointers) { |
1720 __ load_klass(Rtmp1, obj); |
1735 __ load_klass(Rtmp1, obj); |
1721 __ cmpptr(k_RInfo, Rtmp1); |
1736 __ cmpptr(k_RInfo, Rtmp1); |
1722 } else { |
1737 } else { |
1723 __ cmpptr(k_RInfo, Address(obj, oopDesc::klass_offset_in_bytes())); |
1738 __ cmpptr(k_RInfo, Address(obj, oopDesc::klass_offset_in_bytes())); |
1724 } |
1739 } |
1725 #else |
1740 #else |
1726 if (k->is_loaded()) { |
1741 if (k->is_loaded()) { |
1727 __ cmpoop(Address(obj, oopDesc::klass_offset_in_bytes()), k->constant_encoding()); |
1742 __ cmpklass(Address(obj, oopDesc::klass_offset_in_bytes()), k->constant_encoding()); |
1728 } else { |
1743 } else { |
1729 __ cmpptr(k_RInfo, Address(obj, oopDesc::klass_offset_in_bytes())); |
1744 __ cmpptr(k_RInfo, Address(obj, oopDesc::klass_offset_in_bytes())); |
1730 } |
1745 } |
1731 #endif |
1746 #endif |
1732 __ jcc(Assembler::notEqual, *failure_target); |
1747 __ jcc(Assembler::notEqual, *failure_target); |
1738 if (k->is_loaded()) { |
1753 if (k->is_loaded()) { |
1739 // See if we get an immediate positive hit |
1754 // See if we get an immediate positive hit |
1740 #ifdef _LP64 |
1755 #ifdef _LP64 |
1741 __ cmpptr(k_RInfo, Address(klass_RInfo, k->super_check_offset())); |
1756 __ cmpptr(k_RInfo, Address(klass_RInfo, k->super_check_offset())); |
1742 #else |
1757 #else |
1743 __ cmpoop(Address(klass_RInfo, k->super_check_offset()), k->constant_encoding()); |
1758 __ cmpklass(Address(klass_RInfo, k->super_check_offset()), k->constant_encoding()); |
1744 #endif // _LP64 |
1759 #endif // _LP64 |
1745 if ((juint)in_bytes(Klass::secondary_super_cache_offset()) != k->super_check_offset()) { |
1760 if ((juint)in_bytes(Klass::secondary_super_cache_offset()) != k->super_check_offset()) { |
1746 __ jcc(Assembler::notEqual, *failure_target); |
1761 __ jcc(Assembler::notEqual, *failure_target); |
1747 // successful cast, fall through to profile or jump |
1762 // successful cast, fall through to profile or jump |
1748 } else { |
1763 } else { |
1750 __ jcc(Assembler::equal, *success_target); |
1765 __ jcc(Assembler::equal, *success_target); |
1751 // check for self |
1766 // check for self |
1752 #ifdef _LP64 |
1767 #ifdef _LP64 |
1753 __ cmpptr(klass_RInfo, k_RInfo); |
1768 __ cmpptr(klass_RInfo, k_RInfo); |
1754 #else |
1769 #else |
1755 __ cmpoop(klass_RInfo, k->constant_encoding()); |
1770 __ cmpklass(klass_RInfo, k->constant_encoding()); |
1756 #endif // _LP64 |
1771 #endif // _LP64 |
1757 __ jcc(Assembler::equal, *success_target); |
1772 __ jcc(Assembler::equal, *success_target); |
1758 |
1773 |
1759 __ push(klass_RInfo); |
1774 __ push(klass_RInfo); |
1760 #ifdef _LP64 |
1775 #ifdef _LP64 |
1761 __ push(k_RInfo); |
1776 __ push(k_RInfo); |
1762 #else |
1777 #else |
1763 __ pushoop(k->constant_encoding()); |
1778 __ pushklass(k->constant_encoding()); |
1764 #endif // _LP64 |
1779 #endif // _LP64 |
1765 __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id))); |
1780 __ call(RuntimeAddress(Runtime1::entry_for(Runtime1::slow_subtype_check_id))); |
1766 __ pop(klass_RInfo); |
1781 __ pop(klass_RInfo); |
1767 __ pop(klass_RInfo); |
1782 __ pop(klass_RInfo); |
1768 // result is a boolean |
1783 // result is a boolean |
1786 } |
1801 } |
1787 } |
1802 } |
1788 if (op->should_profile()) { |
1803 if (op->should_profile()) { |
1789 Register mdo = klass_RInfo, recv = k_RInfo; |
1804 Register mdo = klass_RInfo, recv = k_RInfo; |
1790 __ bind(profile_cast_success); |
1805 __ bind(profile_cast_success); |
1791 __ movoop(mdo, md->constant_encoding()); |
1806 __ mov_metadata(mdo, md->constant_encoding()); |
1792 __ load_klass(recv, obj); |
1807 __ load_klass(recv, obj); |
1793 Label update_done; |
1808 Label update_done; |
1794 type_profile_helper(mdo, md, data, recv, success); |
1809 type_profile_helper(mdo, md, data, recv, success); |
1795 __ jmp(*success); |
1810 __ jmp(*success); |
1796 |
1811 |
1797 __ bind(profile_cast_failure); |
1812 __ bind(profile_cast_failure); |
1798 __ movoop(mdo, md->constant_encoding()); |
1813 __ mov_metadata(mdo, md->constant_encoding()); |
1799 Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset())); |
1814 Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset())); |
1800 __ subptr(counter_addr, DataLayout::counter_increment); |
1815 __ subptr(counter_addr, DataLayout::counter_increment); |
1801 __ jmp(*failure); |
1816 __ jmp(*failure); |
1802 } |
1817 } |
1803 __ jmp(*success); |
1818 __ jmp(*success); |
1837 if (op->should_profile()) { |
1852 if (op->should_profile()) { |
1838 Label not_null; |
1853 Label not_null; |
1839 __ jccb(Assembler::notEqual, not_null); |
1854 __ jccb(Assembler::notEqual, not_null); |
1840 // Object is null; update MDO and exit |
1855 // Object is null; update MDO and exit |
1841 Register mdo = klass_RInfo; |
1856 Register mdo = klass_RInfo; |
1842 __ movoop(mdo, md->constant_encoding()); |
1857 __ mov_metadata(mdo, md->constant_encoding()); |
1843 Address data_addr(mdo, md->byte_offset_of_slot(data, DataLayout::header_offset())); |
1858 Address data_addr(mdo, md->byte_offset_of_slot(data, DataLayout::header_offset())); |
1844 int header_bits = DataLayout::flag_mask_to_header_mask(BitData::null_seen_byte_constant()); |
1859 int header_bits = DataLayout::flag_mask_to_header_mask(BitData::null_seen_byte_constant()); |
1845 __ orl(data_addr, header_bits); |
1860 __ orl(data_addr, header_bits); |
1846 __ jmp(done); |
1861 __ jmp(done); |
1847 __ bind(not_null); |
1862 __ bind(not_null); |
1869 // fall through to the success case |
1884 // fall through to the success case |
1870 |
1885 |
1871 if (op->should_profile()) { |
1886 if (op->should_profile()) { |
1872 Register mdo = klass_RInfo, recv = k_RInfo; |
1887 Register mdo = klass_RInfo, recv = k_RInfo; |
1873 __ bind(profile_cast_success); |
1888 __ bind(profile_cast_success); |
1874 __ movoop(mdo, md->constant_encoding()); |
1889 __ mov_metadata(mdo, md->constant_encoding()); |
1875 __ load_klass(recv, value); |
1890 __ load_klass(recv, value); |
1876 Label update_done; |
1891 Label update_done; |
1877 type_profile_helper(mdo, md, data, recv, &done); |
1892 type_profile_helper(mdo, md, data, recv, &done); |
1878 __ jmpb(done); |
1893 __ jmpb(done); |
1879 |
1894 |
1880 __ bind(profile_cast_failure); |
1895 __ bind(profile_cast_failure); |
1881 __ movoop(mdo, md->constant_encoding()); |
1896 __ mov_metadata(mdo, md->constant_encoding()); |
1882 Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset())); |
1897 Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset())); |
1883 __ subptr(counter_addr, DataLayout::counter_increment); |
1898 __ subptr(counter_addr, DataLayout::counter_increment); |
1884 __ jmp(*stub->entry()); |
1899 __ jmp(*stub->entry()); |
1885 } |
1900 } |
1886 |
1901 |
2862 add_call_info(code_offset(), op->info()); |
2877 add_call_info(code_offset(), op->info()); |
2863 } |
2878 } |
2864 |
2879 |
2865 |
2880 |
// NOTE(review): diff-dump format — alternating old/new lines. The old body
// built the inline-cache call inline: virtual_call relocation, movoop of the
// non_oop_word sentinel into IC_Klass, then the call. The new body delegates
// all of that to MacroAssembler::ic_call(op->addr()) and records debug info.
2866 void LIR_Assembler::ic_call(LIR_OpJavaCall* op) { |
2881 void LIR_Assembler::ic_call(LIR_OpJavaCall* op) { |
2867 RelocationHolder rh = virtual_call_Relocation::spec(pc()); |
2882 __ ic_call(op->addr()); |
2868 __ movoop(IC_Klass, (jobject)Universe::non_oop_word()); |
2883 add_call_info(code_offset(), op->info()); |
// Alignment check: the call displacement must not straddle a word boundary on
// MP systems (so it can be patched atomically). In the old code __ offset()
// was taken BEFORE emitting the call; in the new code ic_call has already
// emitted it, so the assert rewinds by NativeCall::instruction_size to reach
// the call's start before adding displacement_offset.
2869 assert(!os::is_MP() || |
2884 assert(!os::is_MP() || |
2870 (__ offset() + NativeCall::displacement_offset) % BytesPerWord == 0, |
2885 (__ offset() - NativeCall::instruction_size + NativeCall::displacement_offset) % BytesPerWord == 0, |
2871 "must be aligned"); |
2886 "must be aligned"); |
// Old-only lines below (empty new column): the explicit call + debug-info
// recording, both now subsumed by __ ic_call / the add_call_info above.
2872 __ call(AddressLiteral(op->addr(), rh)); |
|
2873 add_call_info(code_offset(), op->info()); |
|
2874 } |
2887 } |
2875 |
2888 |
2876 |
2889 |
2877 /* Currently, vtable-dispatch is only enabled for sparc platforms */ |
2890 /* Currently, vtable-dispatch is only enabled for sparc platforms */ |
2878 void LIR_Assembler::vtable_call(LIR_OpJavaCall* op) { |
2891 void LIR_Assembler::vtable_call(LIR_OpJavaCall* op) { |
2895 while (offset++ % BytesPerWord != 0) { |
2908 while (offset++ % BytesPerWord != 0) { |
2896 __ nop(); |
2909 __ nop(); |
2897 } |
2910 } |
2898 } |
2911 } |
2899 __ relocate(static_stub_Relocation::spec(call_pc)); |
2912 __ relocate(static_stub_Relocation::spec(call_pc)); |
2900 __ movoop(rbx, (jobject)NULL); |
2913 __ mov_metadata(rbx, (Metadata*)NULL); |
2901 // must be set to -1 at code generation time |
2914 // must be set to -1 at code generation time |
2902 assert(!os::is_MP() || ((__ offset() + 1) % BytesPerWord) == 0, "must be aligned on MP"); |
2915 assert(!os::is_MP() || ((__ offset() + 1) % BytesPerWord) == 0, "must be aligned on MP"); |
2903 // On 64bit this will die since it will take a movq & jmp, must be only a jmp |
2916 // On 64bit this will die since it will take a movq & jmp, must be only a jmp |
2904 __ jump(RuntimeAddress(__ pc())); |
2917 __ jump(RuntimeAddress(__ pc())); |
2905 |
2918 |
3256 |
3269 |
3257 if (flags & LIR_OpArrayCopy::type_check) { |
3270 if (flags & LIR_OpArrayCopy::type_check) { |
3258 // We don't know the array types are compatible |
3271 // We don't know the array types are compatible |
3259 if (basic_type != T_OBJECT) { |
3272 if (basic_type != T_OBJECT) { |
3260 // Simple test for basic type arrays |
3273 // Simple test for basic type arrays |
3261 if (UseCompressedOops) { |
3274 if (UseCompressedKlassPointers) { |
3262 __ movl(tmp, src_klass_addr); |
3275 __ movl(tmp, src_klass_addr); |
3263 __ cmpl(tmp, dst_klass_addr); |
3276 __ cmpl(tmp, dst_klass_addr); |
3264 } else { |
3277 } else { |
3265 __ movptr(tmp, src_klass_addr); |
3278 __ movptr(tmp, src_klass_addr); |
3266 __ cmpptr(tmp, dst_klass_addr); |
3279 __ cmpptr(tmp, dst_klass_addr); |
3416 // object array case, if no type check is needed then either the |
3429 // object array case, if no type check is needed then either the |
3417 // dst type is exactly the expected type and the src type is a |
3430 // dst type is exactly the expected type and the src type is a |
3418 // subtype which we can't check or src is the same array as dst |
3431 // subtype which we can't check or src is the same array as dst |
3419 // but not necessarily exactly of type default_type. |
3432 // but not necessarily exactly of type default_type. |
3420 Label known_ok, halt; |
3433 Label known_ok, halt; |
3421 __ movoop(tmp, default_type->constant_encoding()); |
3434 __ mov_metadata(tmp, default_type->constant_encoding()); |
3422 #ifdef _LP64 |
3435 #ifdef _LP64 |
3423 if (UseCompressedOops) { |
3436 if (UseCompressedKlassPointers) { |
3424 __ encode_heap_oop(tmp); |
3437 __ encode_heap_oop(tmp); |
3425 } |
3438 } |
3426 #endif |
3439 #endif |
3427 |
3440 |
3428 if (basic_type != T_OBJECT) { |
3441 if (basic_type != T_OBJECT) { |
3429 |
3442 |
3430 if (UseCompressedOops) __ cmpl(tmp, dst_klass_addr); |
3443 if (UseCompressedKlassPointers) __ cmpl(tmp, dst_klass_addr); |
3431 else __ cmpptr(tmp, dst_klass_addr); |
3444 else __ cmpptr(tmp, dst_klass_addr); |
3432 __ jcc(Assembler::notEqual, halt); |
3445 __ jcc(Assembler::notEqual, halt); |
3433 if (UseCompressedOops) __ cmpl(tmp, src_klass_addr); |
3446 if (UseCompressedKlassPointers) __ cmpl(tmp, src_klass_addr); |
3434 else __ cmpptr(tmp, src_klass_addr); |
3447 else __ cmpptr(tmp, src_klass_addr); |
3435 __ jcc(Assembler::equal, known_ok); |
3448 __ jcc(Assembler::equal, known_ok); |
3436 } else { |
3449 } else { |
3437 if (UseCompressedOops) __ cmpl(tmp, dst_klass_addr); |
3450 if (UseCompressedKlassPointers) __ cmpl(tmp, dst_klass_addr); |
3438 else __ cmpptr(tmp, dst_klass_addr); |
3451 else __ cmpptr(tmp, dst_klass_addr); |
3439 __ jcc(Assembler::equal, known_ok); |
3452 __ jcc(Assembler::equal, known_ok); |
3440 __ cmpptr(src, dst); |
3453 __ cmpptr(src, dst); |
3441 __ jcc(Assembler::equal, known_ok); |
3454 __ jcc(Assembler::equal, known_ok); |
3442 } |
3455 } |
3515 assert(md != NULL, "Sanity"); |
3528 assert(md != NULL, "Sanity"); |
3516 ciProfileData* data = md->bci_to_data(bci); |
3529 ciProfileData* data = md->bci_to_data(bci); |
3517 assert(data->is_CounterData(), "need CounterData for calls"); |
3530 assert(data->is_CounterData(), "need CounterData for calls"); |
3518 assert(op->mdo()->is_single_cpu(), "mdo must be allocated"); |
3531 assert(op->mdo()->is_single_cpu(), "mdo must be allocated"); |
3519 Register mdo = op->mdo()->as_register(); |
3532 Register mdo = op->mdo()->as_register(); |
3520 __ movoop(mdo, md->constant_encoding()); |
3533 __ mov_metadata(mdo, md->constant_encoding()); |
3521 Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset())); |
3534 Address counter_addr(mdo, md->byte_offset_of_slot(data, CounterData::count_offset())); |
3522 Bytecodes::Code bc = method->java_code_at_bci(bci); |
3535 Bytecodes::Code bc = method->java_code_at_bci(bci); |
3523 const bool callee_is_static = callee->is_loaded() && callee->is_static(); |
3536 const bool callee_is_static = callee->is_loaded() && callee->is_static(); |
3524 // Perform additional virtual call profiling for invokevirtual and |
3537 // Perform additional virtual call profiling for invokevirtual and |
3525 // invokeinterface bytecodes |
3538 // invokeinterface bytecodes |
3531 assert_different_registers(mdo, recv); |
3544 assert_different_registers(mdo, recv); |
3532 assert(data->is_VirtualCallData(), "need VirtualCallData for virtual calls"); |
3545 assert(data->is_VirtualCallData(), "need VirtualCallData for virtual calls"); |
3533 ciKlass* known_klass = op->known_holder(); |
3546 ciKlass* known_klass = op->known_holder(); |
3534 if (C1OptimizeVirtualCallProfiling && known_klass != NULL) { |
3547 if (C1OptimizeVirtualCallProfiling && known_klass != NULL) { |
3535 // We know the type that will be seen at this call site; we can |
3548 // We know the type that will be seen at this call site; we can |
3536 // statically update the methodDataOop rather than needing to do |
3549 // statically update the MethodData* rather than needing to do |
3537 // dynamic tests on the receiver type |
3550 // dynamic tests on the receiver type |
3538 |
3551 |
3539 // NOTE: we should probably put a lock around this search to |
3552 // NOTE: we should probably put a lock around this search to |
3540 // avoid collisions by concurrent compilations |
3553 // avoid collisions by concurrent compilations |
3541 ciVirtualCallData* vc_data = (ciVirtualCallData*) data; |
3554 ciVirtualCallData* vc_data = (ciVirtualCallData*) data; |
3556 // VirtualCallData rather than just the first time |
3569 // VirtualCallData rather than just the first time |
3557 for (i = 0; i < VirtualCallData::row_limit(); i++) { |
3570 for (i = 0; i < VirtualCallData::row_limit(); i++) { |
3558 ciKlass* receiver = vc_data->receiver(i); |
3571 ciKlass* receiver = vc_data->receiver(i); |
3559 if (receiver == NULL) { |
3572 if (receiver == NULL) { |
3560 Address recv_addr(mdo, md->byte_offset_of_slot(data, VirtualCallData::receiver_offset(i))); |
3573 Address recv_addr(mdo, md->byte_offset_of_slot(data, VirtualCallData::receiver_offset(i))); |
3561 __ movoop(recv_addr, known_klass->constant_encoding()); |
3574 __ mov_metadata(recv_addr, known_klass->constant_encoding()); |
3562 Address data_addr(mdo, md->byte_offset_of_slot(data, VirtualCallData::receiver_count_offset(i))); |
3575 Address data_addr(mdo, md->byte_offset_of_slot(data, VirtualCallData::receiver_count_offset(i))); |
3563 __ addptr(data_addr, DataLayout::counter_increment); |
3576 __ addptr(data_addr, DataLayout::counter_increment); |
3564 return; |
3577 return; |
3565 } |
3578 } |
3566 } |
3579 } |