480 |
494 |
481 void LIRGenerator::array_range_check(LIR_Opr array, LIR_Opr index, |
495 void LIRGenerator::array_range_check(LIR_Opr array, LIR_Opr index, |
482 CodeEmitInfo* null_check_info, CodeEmitInfo* range_check_info) { |
496 CodeEmitInfo* null_check_info, CodeEmitInfo* range_check_info) { |
483 CodeStub* stub = new RangeCheckStub(range_check_info, index); |
497 CodeStub* stub = new RangeCheckStub(range_check_info, index); |
484 if (index->is_constant()) { |
498 if (index->is_constant()) { |
|
499 #ifndef MIPS64 |
485 cmp_mem_int(lir_cond_belowEqual, array, arrayOopDesc::length_offset_in_bytes(), |
500 cmp_mem_int(lir_cond_belowEqual, array, arrayOopDesc::length_offset_in_bytes(), |
486 index->as_jint(), null_check_info); |
501 index->as_jint(), null_check_info); |
487 __ branch(lir_cond_belowEqual, T_INT, stub); // forward branch |
502 __ branch(lir_cond_belowEqual, T_INT, stub); // forward branch |
|
503 #else |
|
504 LIR_Opr left = LIR_OprFact::address(new LIR_Address(array, arrayOopDesc::length_offset_in_bytes(), T_INT)); |
|
505 LIR_Opr right = LIR_OprFact::intConst(index->as_jint()); |
|
506 __ null_check_for_branch(lir_cond_belowEqual, left, right, null_check_info); |
|
507 __ branch(lir_cond_belowEqual, left, right ,T_INT, stub); // forward branch |
|
508 #endif |
488 } else { |
509 } else { |
|
510 #ifndef MIPS64 |
489 cmp_reg_mem(lir_cond_aboveEqual, index, array, |
511 cmp_reg_mem(lir_cond_aboveEqual, index, array, |
490 arrayOopDesc::length_offset_in_bytes(), T_INT, null_check_info); |
512 arrayOopDesc::length_offset_in_bytes(), T_INT, null_check_info); |
491 __ branch(lir_cond_aboveEqual, T_INT, stub); // forward branch |
513 __ branch(lir_cond_aboveEqual, T_INT, stub); // forward branch |
|
514 #else |
|
515 LIR_Opr left = index; |
|
516 LIR_Opr right = LIR_OprFact::address(new LIR_Address( array, arrayOopDesc::length_offset_in_bytes(), T_INT)); |
|
517 __ null_check_for_branch(lir_cond_aboveEqual, left, right, null_check_info); |
|
518 __ branch(lir_cond_aboveEqual,left, right ,T_INT, stub); // forward branch |
|
519 #endif |
492 } |
520 } |
493 } |
521 } |
494 |
522 |
495 |
523 |
496 void LIRGenerator::nio_range_check(LIR_Opr buffer, LIR_Opr index, LIR_Opr result, CodeEmitInfo* info) { |
524 void LIRGenerator::nio_range_check(LIR_Opr buffer, LIR_Opr index, LIR_Opr result, CodeEmitInfo* info) { |
497 CodeStub* stub = new RangeCheckStub(info, index, true); |
525 CodeStub* stub = new RangeCheckStub(info, index, true); |
498 if (index->is_constant()) { |
526 if (index->is_constant()) { |
|
527 #ifndef MIPS64 |
499 cmp_mem_int(lir_cond_belowEqual, buffer, java_nio_Buffer::limit_offset(), index->as_jint(), info); |
528 cmp_mem_int(lir_cond_belowEqual, buffer, java_nio_Buffer::limit_offset(), index->as_jint(), info); |
500 __ branch(lir_cond_belowEqual, T_INT, stub); // forward branch |
529 __ branch(lir_cond_belowEqual, T_INT, stub); // forward branch |
|
530 #else |
|
531 LIR_Opr left = LIR_OprFact::address(new LIR_Address(buffer, java_nio_Buffer::limit_offset(),T_INT)); |
|
532 LIR_Opr right = LIR_OprFact::intConst(index->as_jint()); |
|
533 __ null_check_for_branch(lir_cond_belowEqual, left, right, info); |
|
534 __ branch(lir_cond_belowEqual,left, right ,T_INT, stub); // forward branch |
|
535 #endif |
501 } else { |
536 } else { |
|
537 #ifndef MIPS64 |
502 cmp_reg_mem(lir_cond_aboveEqual, index, buffer, |
538 cmp_reg_mem(lir_cond_aboveEqual, index, buffer, |
503 java_nio_Buffer::limit_offset(), T_INT, info); |
539 java_nio_Buffer::limit_offset(), T_INT, info); |
504 __ branch(lir_cond_aboveEqual, T_INT, stub); // forward branch |
540 __ branch(lir_cond_aboveEqual, T_INT, stub); // forward branch |
|
541 #else |
|
542 LIR_Opr left = index; |
|
543 LIR_Opr right = LIR_OprFact::address(new LIR_Address( buffer, java_nio_Buffer::limit_offset(), T_INT)); |
|
544 __ null_check_for_branch(lir_cond_aboveEqual, left, right, info); |
|
545 __ branch(lir_cond_aboveEqual,left, right ,T_INT, stub); // forward branch |
|
546 #endif |
505 } |
547 } |
506 __ move(index, result); |
548 __ move(index, result); |
507 } |
549 } |
508 |
550 |
509 |
551 |
669 tty->print_cr(" ###class not resolved at new bci %d", new_instance->printable_bci()); |
711 tty->print_cr(" ###class not resolved at new bci %d", new_instance->printable_bci()); |
670 } |
712 } |
671 } |
713 } |
672 #endif |
714 #endif |
673 |
715 |
|
716 #ifndef MIPS64 |
674 void LIRGenerator::new_instance(LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info) { |
717 void LIRGenerator::new_instance(LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, LIR_Opr scratch4, LIR_Opr klass_reg, CodeEmitInfo* info) { |
|
718 #else |
|
719 void LIRGenerator::new_instance(LIR_Opr dst, ciInstanceKlass* klass, bool is_unresolved, LIR_Opr scratch1, LIR_Opr scratch2, LIR_Opr scratch3, |
|
720 LIR_Opr scratch4, LIR_Opr scratch5, LIR_Opr scratch6,LIR_Opr klass_reg, CodeEmitInfo* info) { |
|
721 #endif |
675 klass2reg_with_patching(klass_reg, klass, info, is_unresolved); |
722 klass2reg_with_patching(klass_reg, klass, info, is_unresolved); |
676 // If klass is not loaded we do not know if the klass has finalizers: |
723 // If klass is not loaded we do not know if the klass has finalizers: |
677 if (UseFastNewInstance && klass->is_loaded() |
724 if (UseFastNewInstance && klass->is_loaded() |
678 && !Klass::layout_helper_needs_slow_path(klass->layout_helper())) { |
725 && !Klass::layout_helper_needs_slow_path(klass->layout_helper())) { |
679 |
726 |
683 |
730 |
684 assert(klass->is_loaded(), "must be loaded"); |
731 assert(klass->is_loaded(), "must be loaded"); |
685 // allocate space for instance |
732 // allocate space for instance |
686 assert(klass->size_helper() >= 0, "illegal instance size"); |
733 assert(klass->size_helper() >= 0, "illegal instance size"); |
687 const int instance_size = align_object_size(klass->size_helper()); |
734 const int instance_size = align_object_size(klass->size_helper()); |
|
735 #ifndef MIPS64 |
688 __ allocate_object(dst, scratch1, scratch2, scratch3, scratch4, |
736 __ allocate_object(dst, scratch1, scratch2, scratch3, scratch4, |
689 oopDesc::header_size(), instance_size, klass_reg, !klass->is_initialized(), slow_path); |
737 oopDesc::header_size(), instance_size, klass_reg, !klass->is_initialized(), slow_path); |
|
738 #else |
|
739 __ allocate_object(dst, scratch1, scratch2, scratch3, scratch4, scratch5, scratch6, |
|
740 oopDesc::header_size(), instance_size, klass_reg, !klass->is_initialized(), slow_path); |
|
741 |
|
742 #endif |
690 } else { |
743 } else { |
691 CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, Runtime1::new_instance_id); |
744 CodeStub* slow_path = new NewInstanceStub(klass_reg, dst, klass, info, Runtime1::new_instance_id); |
|
745 #ifndef MIPS64 |
692 __ branch(lir_cond_always, T_ILLEGAL, slow_path); |
746 __ branch(lir_cond_always, T_ILLEGAL, slow_path); |
693 __ branch_destination(slow_path->continuation()); |
747 __ branch_destination(slow_path->continuation()); |
|
748 #else |
|
749 __ branch(lir_cond_always, LIR_OprFact::illegalOpr, LIR_OprFact::illegalOpr, T_ILLEGAL, slow_path); |
|
750 __ branch_destination(slow_path->continuation()); |
|
751 #endif |
694 } |
752 } |
695 } |
753 } |
696 |
754 |
697 |
755 |
698 static bool is_constant_zero(Instruction* inst) { |
756 static bool is_constant_zero(Instruction* inst) { |
968 LIR_Address* fake_incr_value = new LIR_Address(data_reg, DataLayout::counter_increment, T_INT); |
1027 LIR_Address* fake_incr_value = new LIR_Address(data_reg, DataLayout::counter_increment, T_INT); |
969 __ leal(LIR_OprFact::address(fake_incr_value), data_reg); |
1028 __ leal(LIR_OprFact::address(fake_incr_value), data_reg); |
970 __ move(data_reg, data_addr); |
1029 __ move(data_reg, data_addr); |
971 } |
1030 } |
972 } |
1031 } |
|
1032 #else |
|
1033 void LIRGenerator::profile_branch(If* if_instr, If::Condition cond , LIR_Opr left, LIR_Opr right) { |
|
1034 if (if_instr->should_profile()) { |
|
1035 ciMethod* method = if_instr->profiled_method(); |
|
1036 assert(method != NULL, "method should be set if branch is profiled"); |
|
1037 ciMethodData* md = method->method_data_or_null(); |
|
1038 if (md == NULL) { |
|
1039 bailout("out of memory building methodDataOop"); |
|
1040 return; |
|
1041 } |
|
1042 ciProfileData* data = md->bci_to_data(if_instr->profiled_bci()); |
|
1043 assert(data != NULL, "must have profiling data"); |
|
1044 assert(data->is_BranchData(), "need BranchData for two-way branches"); |
|
1045 int taken_count_offset = md->byte_offset_of_slot(data, BranchData::taken_offset()); |
|
1046 int not_taken_count_offset = md->byte_offset_of_slot(data, BranchData::not_taken_offset()); |
|
1047 if (if_instr->is_swapped()) { |
|
1048 int t = taken_count_offset; |
|
1049 taken_count_offset = not_taken_count_offset; |
|
1050 not_taken_count_offset = t; |
|
1051 } |
|
1052 LIR_Opr md_reg = new_register(T_METADATA); |
|
1053 __ metadata2reg(md->constant_encoding(), md_reg); |
|
1054 //__ move(LIR_OprFact::oopConst(md->constant_encoding()), md_reg); |
|
1055 LIR_Opr data_offset_reg = new_pointer_register(); |
|
1056 |
|
1057 LIR_Opr opr1 = LIR_OprFact::intConst(taken_count_offset); |
|
1058 LIR_Opr opr2 = LIR_OprFact::intConst(not_taken_count_offset); |
|
1059 LabelObj* skip = new LabelObj(); |
|
1060 |
|
1061 __ move(opr1, data_offset_reg); |
|
1062 __ branch( lir_cond(cond), left, right, skip->label()); |
|
1063 __ move(opr2, data_offset_reg); |
|
1064 __ branch_destination(skip->label()); |
|
1065 |
|
1066 LIR_Opr data_reg = new_pointer_register(); |
|
1067 LIR_Opr tmp_reg = new_pointer_register(); |
|
1068 // LIR_Address* data_addr = new LIR_Address(md_reg, data_offset_reg, T_INT); |
|
1069 __ move(data_offset_reg, tmp_reg); |
|
1070 __ add(tmp_reg, md_reg, tmp_reg); |
|
1071 LIR_Address* data_addr = new LIR_Address(tmp_reg, 0, T_INT); |
|
1072 __ move(LIR_OprFact::address(data_addr), data_reg); |
|
1073 LIR_Address* fake_incr_value = new LIR_Address(data_reg, DataLayout::counter_increment, T_INT); |
|
1074 // Use leal instead of add to avoid destroying condition codes on x86 |
|
1075 __ leal(LIR_OprFact::address(fake_incr_value), data_reg); |
|
1076 __ move(data_reg, LIR_OprFact::address(data_addr)); |
|
1077 } |
|
1078 } |
|
1079 |
|
1080 #endif |
973 |
1081 |
974 // Phi technique: |
1082 // Phi technique: |
975 // This is about passing live values from one basic block to the other. |
1083 // This is about passing live values from one basic block to the other. |
976 // In code generated with Java it is rather rare that more than one |
1084 // In code generated with Java it is rather rare that more than one |
977 // value is on the stack from one basic block to the other. |
1085 // value is on the stack from one basic block to the other. |
1094 if (oc) { |
1202 if (oc) { |
1095 return oc->constant_value(); |
1203 return oc->constant_value(); |
1096 } |
1204 } |
1097 return NULL; |
1205 return NULL; |
1098 } |
1206 } |
|
#ifdef MIPS64
// MIPS64-only card-table post write barrier: mark the card covering `addr`
// as dirty (0) after a reference store.
//
// addr - the address of the stored-to field; if it is a LIR_Address it is
//        first materialized into a register (a plain move when there is no
//        index/displacement, otherwise leal).
//
// card_index = addr >> CardTableModRefBS::card_shift; then 0 is stored to
// card_table_base() + card_index. When the table base fits as an inline
// constant it is folded into the address displacement; otherwise it is
// loaded and added explicitly.
void LIRGenerator::write_barrier(LIR_Opr addr) {
  if (addr->is_address()) {
    LIR_Address* address = (LIR_Address*)addr;
    LIR_Opr ptr = new_register(T_OBJECT);
    if (!address->index()->is_valid() && address->disp() == 0) {
      __ move(address->base(), ptr);
    } else {
      __ leal(addr, ptr);
    }
    addr = ptr;
  }
  assert(addr->is_register(), "must be a register at this point");

  LIR_Opr tmp = new_pointer_register();
  if (TwoOperandLIRForm) {
    // Two-operand form destroys its left input, so shift a copy.
    __ move(addr, tmp);
    __ unsigned_shift_right(tmp, CardTableModRefBS::card_shift, tmp);
  } else {
    __ unsigned_shift_right(addr, CardTableModRefBS::card_shift, tmp);
  }
  if (can_inline_as_constant(card_table_base())) {
    __ move(LIR_OprFact::intConst(0), new LIR_Address(tmp, card_table_base()->as_jint(), T_BYTE));
  } else {
    __ add(tmp, load_constant(card_table_base()), tmp);
    __ move(LIR_OprFact::intConst(0), new LIR_Address(tmp, 0, T_BYTE));
  }
}
#endif
1099 |
1236 |
1100 |
1237 |
1101 void LIRGenerator::do_ExceptionObject(ExceptionObject* x) { |
1238 void LIRGenerator::do_ExceptionObject(ExceptionObject* x) { |
1102 assert(block()->is_set(BlockBegin::exception_entry_flag), "ExceptionObject only allowed in exception handler block"); |
1239 assert(block()->is_set(BlockBegin::exception_entry_flag), "ExceptionObject only allowed in exception handler block"); |
1103 assert(block()->next() == x, "ExceptionObject must be first instruction of block"); |
1240 assert(block()->next() == x, "ExceptionObject must be first instruction of block"); |
1566 __ leal(new_val, new_val_reg); |
1710 __ leal(new_val, new_val_reg); |
1567 new_val = new_val_reg; |
1711 new_val = new_val_reg; |
1568 } |
1712 } |
1569 assert(new_val->is_register(), "must be a register at this point"); |
1713 assert(new_val->is_register(), "must be a register at this point"); |
1570 |
1714 |
|
1715 #ifndef MIPS64 |
1571 __ cmp(lir_cond_notEqual, xor_shift_res, LIR_OprFact::intptrConst(NULL_WORD)); |
1716 __ cmp(lir_cond_notEqual, xor_shift_res, LIR_OprFact::intptrConst(NULL_WORD)); |
1572 |
1717 |
|
1718 #endif |
1573 CodeStub* slow = new G1PostBarrierStub(addr, new_val); |
1719 CodeStub* slow = new G1PostBarrierStub(addr, new_val); |
|
1720 #ifndef MIPS64 |
1574 __ branch(lir_cond_notEqual, LP64_ONLY(T_LONG) NOT_LP64(T_INT), slow); |
1721 __ branch(lir_cond_notEqual, LP64_ONLY(T_LONG) NOT_LP64(T_INT), slow); |
|
1722 #else |
|
1723 __ branch(lir_cond_notEqual, xor_shift_res, LIR_OprFact::intptrConst((intptr_t)NULL_WORD), LP64_ONLY(T_LONG) NOT_LP64(T_INT), slow); |
|
1724 #endif |
1575 __ branch_destination(slow->continuation()); |
1725 __ branch_destination(slow->continuation()); |
1576 } |
1726 } |
1577 |
1727 |
1578 #endif // INCLUDE_ALL_GCS |
1728 #endif // INCLUDE_ALL_GCS |
1579 //////////////////////////////////////////////////////////////////////// |
1729 //////////////////////////////////////////////////////////////////////// |
1834 LIR_Opr result = rlock_result(x); |
1991 LIR_Opr result = rlock_result(x); |
1835 if (GenerateRangeChecks) { |
1992 if (GenerateRangeChecks) { |
1836 CodeEmitInfo* info = state_for(x); |
1993 CodeEmitInfo* info = state_for(x); |
1837 CodeStub* stub = new RangeCheckStub(info, index.result(), true); |
1994 CodeStub* stub = new RangeCheckStub(info, index.result(), true); |
1838 if (index.result()->is_constant()) { |
1995 if (index.result()->is_constant()) { |
|
1996 #ifndef MIPS64 |
1839 cmp_mem_int(lir_cond_belowEqual, buf.result(), java_nio_Buffer::limit_offset(), index.result()->as_jint(), info); |
1997 cmp_mem_int(lir_cond_belowEqual, buf.result(), java_nio_Buffer::limit_offset(), index.result()->as_jint(), info); |
1840 __ branch(lir_cond_belowEqual, T_INT, stub); |
1998 __ branch(lir_cond_belowEqual, T_INT, stub); |
|
1999 #else |
|
2000 LIR_Opr left = LIR_OprFact::address(new LIR_Address( buf.result(), |
|
2001 java_nio_Buffer::limit_offset(),T_INT)); |
|
2002 LIR_Opr right = LIR_OprFact::intConst(index.result()->as_jint()); |
|
2003 __ null_check_for_branch(lir_cond_belowEqual, left, right, info); |
|
2004 __ branch(lir_cond_belowEqual,left, right ,T_INT, stub); // forward branch |
|
2005 |
|
2006 #endif |
1841 } else { |
2007 } else { |
|
2008 #ifndef MIPS64 |
1842 cmp_reg_mem(lir_cond_aboveEqual, index.result(), buf.result(), |
2009 cmp_reg_mem(lir_cond_aboveEqual, index.result(), buf.result(), |
1843 java_nio_Buffer::limit_offset(), T_INT, info); |
2010 java_nio_Buffer::limit_offset(), T_INT, info); |
1844 __ branch(lir_cond_aboveEqual, T_INT, stub); |
2011 __ branch(lir_cond_aboveEqual, T_INT, stub); |
|
2012 #else |
|
2013 LIR_Opr right = LIR_OprFact::address(new LIR_Address( buf.result(), java_nio_Buffer::limit_offset(),T_INT)); |
|
2014 LIR_Opr left = index.result(); |
|
2015 __ null_check_for_branch(lir_cond_aboveEqual, left, right, info); |
|
2016 __ branch(lir_cond_aboveEqual, left, right , T_INT, stub); // forward branch |
|
2017 #endif |
1845 } |
2018 } |
1846 __ move(index.result(), result); |
2019 __ move(index.result(), result); |
1847 } else { |
2020 } else { |
1848 // Just load the index into the result register |
2021 // Just load the index into the result register |
1849 __ move(index.result(), result); |
2022 __ move(index.result(), result); |
1916 // emit array address setup early so it schedules better |
2089 // emit array address setup early so it schedules better |
1917 LIR_Address* array_addr = emit_array_address(array.result(), index.result(), x->elt_type(), false); |
2090 LIR_Address* array_addr = emit_array_address(array.result(), index.result(), x->elt_type(), false); |
1918 |
2091 |
1919 if (GenerateRangeChecks && needs_range_check) { |
2092 if (GenerateRangeChecks && needs_range_check) { |
1920 if (StressLoopInvariantCodeMotion && range_check_info->deoptimize_on_exception()) { |
2093 if (StressLoopInvariantCodeMotion && range_check_info->deoptimize_on_exception()) { |
|
2094 #ifndef MIPS64 |
1921 __ branch(lir_cond_always, T_ILLEGAL, new RangeCheckStub(range_check_info, index.result())); |
2095 __ branch(lir_cond_always, T_ILLEGAL, new RangeCheckStub(range_check_info, index.result())); |
|
2096 #else |
|
2097 tty->print_cr("LIRGenerator::do_LoadIndexed(LoadIndexed* x) unimplemented yet!"); |
|
2098 Unimplemented(); |
|
2099 #endif |
1922 } else if (use_length) { |
2100 } else if (use_length) { |
1923 // TODO: use a (modified) version of array_range_check that does not require a |
2101 // TODO: use a (modified) version of array_range_check that does not require a |
1924 // constant length to be loaded to a register |
2102 // constant length to be loaded to a register |
|
2103 #ifndef MIPS64 |
1925 __ cmp(lir_cond_belowEqual, length.result(), index.result()); |
2104 __ cmp(lir_cond_belowEqual, length.result(), index.result()); |
1926 __ branch(lir_cond_belowEqual, T_INT, new RangeCheckStub(range_check_info, index.result())); |
2105 __ branch(lir_cond_belowEqual, T_INT, new RangeCheckStub(range_check_info, index.result())); |
|
2106 #else |
|
2107 __ branch(lir_cond_belowEqual, length.result(), index.result(),T_INT, new RangeCheckStub(range_check_info, index.result())); |
|
2108 #endif |
1927 } else { |
2109 } else { |
1928 array_range_check(array.result(), index.result(), null_check_info, range_check_info); |
2110 array_range_check(array.result(), index.result(), null_check_info, range_check_info); |
1929 // The range check performs the null check, so clear it out for the load |
2111 // The range check performs the null check, so clear it out for the load |
1930 null_check_info = NULL; |
2112 null_check_info = NULL; |
1931 } |
2113 } |
2109 addr = new LIR_Address(base_op, index_op, LIR_Address::Scale(log2_scale), 0, dst_type); |
2291 addr = new LIR_Address(base_op, index_op, LIR_Address::Scale(log2_scale), 0, dst_type); |
2110 #elif defined(GENERATE_ADDRESS_IS_PREFERRED) |
2292 #elif defined(GENERATE_ADDRESS_IS_PREFERRED) |
2111 addr = generate_address(base_op, index_op, log2_scale, 0, dst_type); |
2293 addr = generate_address(base_op, index_op, log2_scale, 0, dst_type); |
2112 #else |
2294 #else |
2113 if (index_op->is_illegal() || log2_scale == 0) { |
2295 if (index_op->is_illegal() || log2_scale == 0) { |
|
2296 #ifndef MIPS64 |
2114 addr = new LIR_Address(base_op, index_op, dst_type); |
2297 addr = new LIR_Address(base_op, index_op, dst_type); |
|
2298 #else |
|
2299 #ifdef _LP64 |
|
2300 LIR_Opr ptr = new_register(T_LONG); |
|
2301 #else |
|
2302 LIR_Opr ptr = new_register(T_INT); |
|
2303 #endif |
|
2304 __ move(base_op, ptr); |
|
2305 if(index_op -> is_valid()) |
|
2306 __ add(ptr, index_op, ptr); |
|
2307 addr = new LIR_Address(ptr, 0, dst_type); |
|
2308 #endif |
2115 } else { |
2309 } else { |
2116 LIR_Opr tmp = new_pointer_register(); |
2310 LIR_Opr tmp = new_pointer_register(); |
2117 __ shift_left(index_op, log2_scale, tmp); |
2311 __ shift_left(index_op, log2_scale, tmp); |
2118 addr = new LIR_Address(base_op, tmp, dst_type); |
2312 addr = new LIR_Address(base_op, tmp, dst_type); |
2119 } |
2313 } |
2319 } else { |
2513 } else { |
2320 assert(off.type()->is_long(), "what else?"); |
2514 assert(off.type()->is_long(), "what else?"); |
2321 referent_off = new_register(T_LONG); |
2515 referent_off = new_register(T_LONG); |
2322 __ move(LIR_OprFact::longConst(java_lang_ref_Reference::referent_offset), referent_off); |
2516 __ move(LIR_OprFact::longConst(java_lang_ref_Reference::referent_offset), referent_off); |
2323 } |
2517 } |
|
2518 #ifndef MIPS64 |
2324 __ cmp(lir_cond_notEqual, off.result(), referent_off); |
2519 __ cmp(lir_cond_notEqual, off.result(), referent_off); |
2325 __ branch(lir_cond_notEqual, as_BasicType(off.type()), Lcont->label()); |
2520 __ branch(lir_cond_notEqual, as_BasicType(off.type()), Lcont->label()); |
|
2521 #else |
|
2522 __ branch(lir_cond_notEqual, off.result(), referent_off, Lcont->label()); |
|
2523 #endif |
2326 } |
2524 } |
2327 if (gen_source_check) { |
2525 if (gen_source_check) { |
2328 // offset is a const and equals referent offset |
2526 // offset is a const and equals referent offset |
2329 // if (source == null) -> continue |
2527 // if (source == null) -> continue |
|
2528 #ifndef MIPS64 |
2330 __ cmp(lir_cond_equal, src.result(), LIR_OprFact::oopConst(NULL)); |
2529 __ cmp(lir_cond_equal, src.result(), LIR_OprFact::oopConst(NULL)); |
2331 __ branch(lir_cond_equal, T_OBJECT, Lcont->label()); |
2530 __ branch(lir_cond_equal, T_OBJECT, Lcont->label()); |
|
2531 #else |
|
2532 __ branch(lir_cond_equal, src.result(), LIR_OprFact::oopConst(NULL), Lcont->label()); |
|
2533 #endif |
2332 } |
2534 } |
2333 LIR_Opr src_klass = new_register(T_OBJECT); |
2535 LIR_Opr src_klass = new_register(T_OBJECT); |
2334 if (gen_type_check) { |
2536 if (gen_type_check) { |
2335 // We have determined that offset == referent_offset && src != null. |
2537 // We have determined that offset == referent_offset && src != null. |
2336 // if (src->_klass->_reference_type == REF_NONE) -> continue |
2538 // if (src->_klass->_reference_type == REF_NONE) -> continue |
2337 __ move(new LIR_Address(src.result(), oopDesc::klass_offset_in_bytes(), T_ADDRESS), src_klass); |
2539 __ move(new LIR_Address(src.result(), oopDesc::klass_offset_in_bytes(), T_ADDRESS), src_klass); |
2338 LIR_Address* reference_type_addr = new LIR_Address(src_klass, in_bytes(InstanceKlass::reference_type_offset()), T_BYTE); |
2540 LIR_Address* reference_type_addr = new LIR_Address(src_klass, in_bytes(InstanceKlass::reference_type_offset()), T_BYTE); |
2339 LIR_Opr reference_type = new_register(T_INT); |
2541 LIR_Opr reference_type = new_register(T_INT); |
2340 __ move(reference_type_addr, reference_type); |
2542 __ move(reference_type_addr, reference_type); |
|
2543 #ifndef MIPS64 |
2341 __ cmp(lir_cond_equal, reference_type, LIR_OprFact::intConst(REF_NONE)); |
2544 __ cmp(lir_cond_equal, reference_type, LIR_OprFact::intConst(REF_NONE)); |
2342 __ branch(lir_cond_equal, T_INT, Lcont->label()); |
2545 __ branch(lir_cond_equal, T_INT, Lcont->label()); |
|
2546 #else |
|
2547 __ branch(lir_cond_equal, reference_type, LIR_OprFact::intConst(REF_NONE), Lcont->label()); |
|
2548 #endif |
2343 } |
2549 } |
2344 { |
2550 { |
2345 // We have determined that src->_klass->_reference_type != REF_NONE |
2551 // We have determined that src->_klass->_reference_type != REF_NONE |
2346 // so register the value in the referent field with the pre-barrier. |
2552 // so register the value in the referent field with the pre-barrier. |
2347 pre_barrier(LIR_OprFact::illegalOpr /* addr_opr */, |
2553 pre_barrier(LIR_OprFact::illegalOpr /* addr_opr */, |
2417 SwitchRange* one_range = x->at(i); |
2623 SwitchRange* one_range = x->at(i); |
2418 int low_key = one_range->low_key(); |
2624 int low_key = one_range->low_key(); |
2419 int high_key = one_range->high_key(); |
2625 int high_key = one_range->high_key(); |
2420 BlockBegin* dest = one_range->sux(); |
2626 BlockBegin* dest = one_range->sux(); |
2421 if (low_key == high_key) { |
2627 if (low_key == high_key) { |
|
2628 #ifndef MIPS64 |
2422 __ cmp(lir_cond_equal, value, low_key); |
2629 __ cmp(lir_cond_equal, value, low_key); |
2423 __ branch(lir_cond_equal, T_INT, dest); |
2630 __ branch(lir_cond_equal, T_INT, dest); |
|
2631 #else |
|
2632 __ branch(lir_cond_equal, value, LIR_OprFact::intConst(low_key), T_INT, dest); |
|
2633 #endif |
2424 } else if (high_key - low_key == 1) { |
2634 } else if (high_key - low_key == 1) { |
|
2635 #ifndef MIPS64 |
2425 __ cmp(lir_cond_equal, value, low_key); |
2636 __ cmp(lir_cond_equal, value, low_key); |
2426 __ branch(lir_cond_equal, T_INT, dest); |
2637 __ branch(lir_cond_equal, T_INT, dest); |
2427 __ cmp(lir_cond_equal, value, high_key); |
2638 __ cmp(lir_cond_equal, value, high_key); |
2428 __ branch(lir_cond_equal, T_INT, dest); |
2639 __ branch(lir_cond_equal, T_INT, dest); |
|
2640 #else |
|
2641 __ branch(lir_cond_equal, value, LIR_OprFact::intConst(low_key), T_INT, dest); |
|
2642 __ branch(lir_cond_equal, value, LIR_OprFact::intConst(high_key), T_INT, dest); |
|
2643 |
|
2644 #endif |
2429 } else { |
2645 } else { |
2430 LabelObj* L = new LabelObj(); |
2646 LabelObj* L = new LabelObj(); |
|
2647 #ifndef MIPS64 |
2431 __ cmp(lir_cond_less, value, low_key); |
2648 __ cmp(lir_cond_less, value, low_key); |
2432 __ branch(lir_cond_less, T_INT, L->label()); |
2649 __ branch(lir_cond_less, T_INT, L->label()); |
2433 __ cmp(lir_cond_lessEqual, value, high_key); |
2650 __ cmp(lir_cond_lessEqual, value, high_key); |
2434 __ branch(lir_cond_lessEqual, T_INT, dest); |
2651 __ branch(lir_cond_lessEqual, T_INT, dest); |
2435 __ branch_destination(L->label()); |
2652 __ branch_destination(L->label()); |
|
2653 #else |
|
2654 __ branch(lir_cond_less, value, LIR_OprFact::intConst(low_key), L->label()); |
|
2655 __ branch(lir_cond_lessEqual, value, LIR_OprFact::intConst(high_key), T_INT, dest); |
|
2656 __ branch_destination(L->label()); |
|
2657 #endif |
2436 } |
2658 } |
2437 } |
2659 } |
2438 __ jump(default_sux); |
2660 __ jump(default_sux); |
2439 } |
2661 } |
2440 |
2662 |
3049 LIRItem f_val(x->fval(), this); |
3279 LIRItem f_val(x->fval(), this); |
3050 t_val.dont_load_item(); |
3280 t_val.dont_load_item(); |
3051 f_val.dont_load_item(); |
3281 f_val.dont_load_item(); |
3052 LIR_Opr reg = rlock_result(x); |
3282 LIR_Opr reg = rlock_result(x); |
3053 |
3283 |
|
3284 #ifndef MIPS64 |
3054 __ cmp(lir_cond(x->cond()), left.result(), right.result()); |
3285 __ cmp(lir_cond(x->cond()), left.result(), right.result()); |
3055 __ cmove(lir_cond(x->cond()), t_val.result(), f_val.result(), reg, as_BasicType(x->x()->type())); |
3286 __ cmove(lir_cond(x->cond()), t_val.result(), f_val.result(), reg, as_BasicType(x->x()->type())); |
|
3287 #else |
|
3288 LIR_Opr opr1 = t_val.result(); |
|
3289 LIR_Opr opr2 = f_val.result(); |
|
3290 LabelObj* skip = new LabelObj(); |
|
3291 __ move(opr1, reg); |
|
3292 __ branch(lir_cond(x->cond()), left.result(), right.result(), skip->label()); |
|
3293 __ move(opr2, reg); |
|
3294 __ branch_destination(skip->label()); |
|
3295 #endif |
3056 } |
3296 } |
3057 |
3297 |
3058 void LIRGenerator::do_RuntimeCall(address routine, int expected_arguments, Intrinsic* x) { |
3298 void LIRGenerator::do_RuntimeCall(address routine, int expected_arguments, Intrinsic* x) { |
3059 assert(x->number_of_arguments() == expected_arguments, "wrong type"); |
3299 assert(x->number_of_arguments() == expected_arguments, "wrong type"); |
3060 LIR_Opr reg = result_register_for(x->type()); |
3300 LIR_Opr reg = result_register_for(x->type()); |
3401 if (notify) { |
3641 if (notify) { |
3402 LIR_Opr mask = load_immediate(frequency << InvocationCounter::count_shift, T_INT); |
3642 LIR_Opr mask = load_immediate(frequency << InvocationCounter::count_shift, T_INT); |
3403 LIR_Opr meth = new_register(T_METADATA); |
3643 LIR_Opr meth = new_register(T_METADATA); |
3404 __ metadata2reg(method->constant_encoding(), meth); |
3644 __ metadata2reg(method->constant_encoding(), meth); |
3405 __ logical_and(result, mask, result); |
3645 __ logical_and(result, mask, result); |
|
3646 #ifndef MIPS64 |
3406 __ cmp(lir_cond_equal, result, LIR_OprFact::intConst(0)); |
3647 __ cmp(lir_cond_equal, result, LIR_OprFact::intConst(0)); |
|
3648 #endif |
3407 // The bci for info can point to cmp for if's we want the if bci |
3649 // The bci for info can point to cmp for if's we want the if bci |
3408 CodeStub* overflow = new CounterOverflowStub(info, bci, meth); |
3650 CodeStub* overflow = new CounterOverflowStub(info, bci, meth); |
|
3651 #ifndef MIPS64 |
3409 __ branch(lir_cond_equal, T_INT, overflow); |
3652 __ branch(lir_cond_equal, T_INT, overflow); |
|
3653 #else |
|
3654 __ branch(lir_cond_equal, result, LIR_OprFact::intConst(0), T_INT, overflow); |
|
3655 #endif |
3410 __ branch_destination(overflow->continuation()); |
3656 __ branch_destination(overflow->continuation()); |
3411 } |
3657 } |
3412 } |
3658 } |
3413 |
3659 |
3414 void LIRGenerator::do_RuntimeCall(RuntimeCall* x) { |
3660 void LIRGenerator::do_RuntimeCall(RuntimeCall* x) { |