Mon, 27 Jun 2016 17:16:58 +0800
[Code Reorganization] Removed useless codes.
1 /*
2 * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
3 * Copyright (c) 2015, 2016, Loongson Technology. All rights reserved.
4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
5 *
6 * This code is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 only, as
8 * published by the Free Software Foundation.
9 *
10 * This code is distributed in the hope that it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
13 * version 2 for more details (a copy is included in the LICENSE file that
14 * accompanied this code).
15 *
16 * You should have received a copy of the GNU General Public License version
17 * 2 along with this work; if not, write to the Free Software Foundation,
18 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
19 *
20 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
21 * or visit www.oracle.com if you need additional information or have any
22 * questions.
23 *
24 */
26 #ifndef CPU_MIPS_VM_ASSEMBLER_MIPS_INLINE_HPP
27 #define CPU_MIPS_VM_ASSEMBLER_MIPS_INLINE_HPP
29 #include "asm/assembler.inline.hpp"
30 #include "asm/codeBuffer.hpp"
31 #include "code/codeCache.hpp"
33 /*
34 inline void MacroAssembler::pd_patch_instruction(address branch, address target) {
35 jint& stub_inst = *(jint*) branch;
36 stub_inst = patched_branch(target - branch, stub_inst, 0);
37 }
38 */
40 #ifndef PRODUCT
41 /*
42 inline void MacroAssembler::pd_print_patched_instruction(address branch) {
43 jint stub_inst = *(jint*) branch;
44 print_instruction(stub_inst);
45 ::tty->print("%s", " (unresolved)");
46 }
47 */
48 #endif // PRODUCT
50 //inline bool Address::is_simm13(int offset) { return Assembler::is_simm13(disp() + offset); }
53 inline void Assembler::check_delay() {
54 # ifdef CHECK_DELAY
55 // guarantee( delay_state != at_delay_slot, "must say delayed() when filling delay slot");
56 delay_state = no_delay;
57 # endif
58 }
60 inline void Assembler::emit_long(int x) {
61 check_delay();
62 AbstractAssembler::emit_int32(x);
63 }
65 inline void Assembler::emit_data(int x, relocInfo::relocType rtype) {
66 relocate(rtype);
67 emit_long(x);
68 }
70 inline void Assembler::emit_data(int x, RelocationHolder const& rspec) {
71 relocate(rspec);
72 emit_long(x);
73 }
74 /*
75 inline void MacroAssembler::store_int_argument(Register s, Argument &a) {
76 if(a.is_Register()) {
77 move(a.as_Register(), s);
78 } else {
79 sw(s, a.as_caller_address());
80 }
81 }
83 inline void MacroAssembler::store_long_argument(Register s, Argument &a) {
84 Argument a1 = a.successor();
85 if(a.is_Register() && a1.is_Register()) {
86 move(a.as_Register(), s);
87 move(a.as_Register(), s); // NOTE(review): duplicated move — the second call presumably was meant to use a1.as_Register(); confirm before ever reviving this dead code
88 } else {
89 sd(s, a.as_caller_address());
90 }
91 }
93 inline void MacroAssembler::store_float_argument(FloatRegister s, Argument &a) {
94 if(a.is_Register()) {
95 mov_s(a.as_FloatRegister(), s);
96 } else {
97 swc1(s, a.as_caller_address());
98 }
99 }
101 inline void MacroAssembler::store_double_argument(FloatRegister s, Argument &a) {
102 if(a.is_Register()) {
103 mov_d(a.as_FloatRegister(), s);
104 } else {
105 sdc1(s, a.as_caller_address());
106 }
107 }
109 inline void MacroAssembler::store_ptr_argument(Register s, Argument &a) {
110 if(a.is_Register()) {
111 move(a.as_Register(), s);
112 } else {
113 st_ptr(s, a.as_caller_address());
114 }
115 }
116 inline void MacroAssembler::ld_ptr(Register rt, Register base, int offset16) {
117 #ifdef _LP64
118 ld(rt, base, offset16);
119 #else
120 lw(rt, base, offset16);
121 #endif
122 }
123 inline void MacroAssembler::ld_ptr(Register rt, Address a) {
124 #ifdef _LP64
125 ld(rt, a.base(), a.disp());
126 #else
127 lw(rt, a.base(), a.disp());
128 #endif
129 }
131 inline void MacroAssembler::st_ptr(Register rt, Address a) {
132 #ifdef _LP64
133 sd(rt, a.base(), a.disp());
134 #else
135 sw(rt, a.base(), a.disp());
136 #endif
137 }
139 inline void MacroAssembler::st_ptr(Register rt, Register base, int offset16) {
140 #ifdef _LP64
141 sd(rt, base, offset16);
142 #else
143 sw(rt, base, offset16);
144 #endif
145 }
147 inline void MacroAssembler::ld_long(Register rt, Register base, int offset16) {
148 #ifdef _LP64
149 ld(rt, base, offset16);
150 #else
151 lw(rt, base, offset16);
152 #endif
153 }
155 inline void MacroAssembler::st_long(Register rt, Register base, int offset16) {
156 #ifdef _LP64
157 sd(rt, base, offset16);
158 #else
159 sw(rt, base, offset16);
160 #endif
161 }
163 inline void MacroAssembler::ld_long(Register rt, Address a) {
164 #ifdef _LP64
165 ld(rt, a.base(), a.disp());
166 #else
167 lw(rt, a.base(), a.disp());
168 #endif
169 }
171 inline void MacroAssembler::st_long(Register rt, Address a) {
172 #ifdef _LP64
173 sd(rt, a.base(), a.disp());
174 #else
175 sw(rt, a.base(), a.disp());
176 #endif
177 }
179 inline void MacroAssembler::addu_long(Register rd, Register rs, Register rt) {
180 #ifdef _LP64
181 daddu(rd, rs, rt);
182 #else
183 addu(rd, rs, rt);
184 #endif
185 }
187 inline void MacroAssembler::addu_long(Register rd, Register rs, long imm32_64) {
188 #ifdef _LP64
189 daddiu(rd, rs, imm32_64);
190 #else
191 addiu(rd, rs, imm32_64);
192 #endif
193 } */
195 #endif // CPU_MIPS_VM_ASSEMBLER_MIPS_INLINE_HPP