2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008 Fabrice Bellard
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 #include "exec/helper-proto.h"
26 #include "exec/helper-gen.h"
28 int gen_new_label(void);
/* Append an opcode that takes no operands to the opcode stream.  */
static inline void tcg_gen_op0(TCGOpcode opc)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
}
/* Append an opcode with one 32-bit temporary operand: the opcode goes to
   the opcode stream, the operand index to the parameter stream.  */
static inline void tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
}
/* Append an opcode with one 64-bit temporary operand.  */
static inline void tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
}
/* Append an opcode with one immediate (constant) operand; the immediate is
   stored directly in the parameter stream, not as a temporary index.  */
static inline void tcg_gen_op1i(TCGOpcode opc, TCGArg arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = arg1;
}
/* Append an opcode with two 32-bit temporary operands.  Operands are
   emitted in order, so arg1 is the first parameter slot.  */
static inline void tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
}
60 static inline void tcg_gen_op2_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
)
62 *tcg_ctx
.gen_opc_ptr
++ = opc
;
63 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
64 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
67 static inline void tcg_gen_op2i_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGArg arg2
)
69 *tcg_ctx
.gen_opc_ptr
++ = opc
;
70 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
71 *tcg_ctx
.gen_opparam_ptr
++ = arg2
;
74 static inline void tcg_gen_op2i_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGArg arg2
)
76 *tcg_ctx
.gen_opc_ptr
++ = opc
;
77 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
78 *tcg_ctx
.gen_opparam_ptr
++ = arg2
;
81 static inline void tcg_gen_op2ii(TCGOpcode opc
, TCGArg arg1
, TCGArg arg2
)
83 *tcg_ctx
.gen_opc_ptr
++ = opc
;
84 *tcg_ctx
.gen_opparam_ptr
++ = arg1
;
85 *tcg_ctx
.gen_opparam_ptr
++ = arg2
;
/* Append an opcode with three 32-bit temporary operands.
   NOTE(review): the arg3 parameter line was lost in this copy of the file;
   restored from the body's use of arg3 — confirm against upstream.  */
static inline void tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
}
97 static inline void tcg_gen_op3_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
100 *tcg_ctx
.gen_opc_ptr
++ = opc
;
101 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
102 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
103 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
106 static inline void tcg_gen_op3i_i32(TCGOpcode opc
, TCGv_i32 arg1
,
107 TCGv_i32 arg2
, TCGArg arg3
)
109 *tcg_ctx
.gen_opc_ptr
++ = opc
;
110 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
111 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
112 *tcg_ctx
.gen_opparam_ptr
++ = arg3
;
115 static inline void tcg_gen_op3i_i64(TCGOpcode opc
, TCGv_i64 arg1
,
116 TCGv_i64 arg2
, TCGArg arg3
)
118 *tcg_ctx
.gen_opc_ptr
++ = opc
;
119 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
120 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
121 *tcg_ctx
.gen_opparam_ptr
++ = arg3
;
/* Append a 32-bit load/store opcode.  Parameter order is: value temp,
   base pointer temp, then the constant byte offset as an immediate.  */
static inline void tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
                                       TCGv_ptr base, TCGArg offset)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(val);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_PTR(base);
    *tcg_ctx.gen_opparam_ptr++ = offset;
}
133 static inline void tcg_gen_ldst_op_i64(TCGOpcode opc
, TCGv_i64 val
,
134 TCGv_ptr base
, TCGArg offset
)
136 *tcg_ctx
.gen_opc_ptr
++ = opc
;
137 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(val
);
138 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_PTR(base
);
139 *tcg_ctx
.gen_opparam_ptr
++ = offset
;
142 static inline void tcg_gen_op4_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
143 TCGv_i32 arg3
, TCGv_i32 arg4
)
145 *tcg_ctx
.gen_opc_ptr
++ = opc
;
146 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
147 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
148 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
149 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
152 static inline void tcg_gen_op4_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
153 TCGv_i64 arg3
, TCGv_i64 arg4
)
155 *tcg_ctx
.gen_opc_ptr
++ = opc
;
156 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
157 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
158 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
159 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
162 static inline void tcg_gen_op4i_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
163 TCGv_i32 arg3
, TCGArg arg4
)
165 *tcg_ctx
.gen_opc_ptr
++ = opc
;
166 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
167 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
168 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
169 *tcg_ctx
.gen_opparam_ptr
++ = arg4
;
172 static inline void tcg_gen_op4i_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
173 TCGv_i64 arg3
, TCGArg arg4
)
175 *tcg_ctx
.gen_opc_ptr
++ = opc
;
176 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
177 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
178 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
179 *tcg_ctx
.gen_opparam_ptr
++ = arg4
;
182 static inline void tcg_gen_op4ii_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
183 TCGArg arg3
, TCGArg arg4
)
185 *tcg_ctx
.gen_opc_ptr
++ = opc
;
186 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
187 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
188 *tcg_ctx
.gen_opparam_ptr
++ = arg3
;
189 *tcg_ctx
.gen_opparam_ptr
++ = arg4
;
192 static inline void tcg_gen_op4ii_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
193 TCGArg arg3
, TCGArg arg4
)
195 *tcg_ctx
.gen_opc_ptr
++ = opc
;
196 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
197 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
198 *tcg_ctx
.gen_opparam_ptr
++ = arg3
;
199 *tcg_ctx
.gen_opparam_ptr
++ = arg4
;
202 static inline void tcg_gen_op5_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
203 TCGv_i32 arg3
, TCGv_i32 arg4
, TCGv_i32 arg5
)
205 *tcg_ctx
.gen_opc_ptr
++ = opc
;
206 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
207 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
208 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
209 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
210 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg5
);
213 static inline void tcg_gen_op5_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
214 TCGv_i64 arg3
, TCGv_i64 arg4
, TCGv_i64 arg5
)
216 *tcg_ctx
.gen_opc_ptr
++ = opc
;
217 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
218 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
219 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
220 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
221 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg5
);
224 static inline void tcg_gen_op5i_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
225 TCGv_i32 arg3
, TCGv_i32 arg4
, TCGArg arg5
)
227 *tcg_ctx
.gen_opc_ptr
++ = opc
;
228 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
229 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
230 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
231 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
232 *tcg_ctx
.gen_opparam_ptr
++ = arg5
;
235 static inline void tcg_gen_op5i_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
236 TCGv_i64 arg3
, TCGv_i64 arg4
, TCGArg arg5
)
238 *tcg_ctx
.gen_opc_ptr
++ = opc
;
239 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
240 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
241 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
242 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
243 *tcg_ctx
.gen_opparam_ptr
++ = arg5
;
246 static inline void tcg_gen_op5ii_i32(TCGOpcode opc
, TCGv_i32 arg1
,
247 TCGv_i32 arg2
, TCGv_i32 arg3
,
248 TCGArg arg4
, TCGArg arg5
)
250 *tcg_ctx
.gen_opc_ptr
++ = opc
;
251 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
252 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
253 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
254 *tcg_ctx
.gen_opparam_ptr
++ = arg4
;
255 *tcg_ctx
.gen_opparam_ptr
++ = arg5
;
258 static inline void tcg_gen_op5ii_i64(TCGOpcode opc
, TCGv_i64 arg1
,
259 TCGv_i64 arg2
, TCGv_i64 arg3
,
260 TCGArg arg4
, TCGArg arg5
)
262 *tcg_ctx
.gen_opc_ptr
++ = opc
;
263 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
264 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
265 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
266 *tcg_ctx
.gen_opparam_ptr
++ = arg4
;
267 *tcg_ctx
.gen_opparam_ptr
++ = arg5
;
270 static inline void tcg_gen_op6_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
271 TCGv_i32 arg3
, TCGv_i32 arg4
, TCGv_i32 arg5
,
274 *tcg_ctx
.gen_opc_ptr
++ = opc
;
275 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
276 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
277 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
278 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
279 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg5
);
280 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg6
);
283 static inline void tcg_gen_op6_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
284 TCGv_i64 arg3
, TCGv_i64 arg4
, TCGv_i64 arg5
,
287 *tcg_ctx
.gen_opc_ptr
++ = opc
;
288 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
289 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
290 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
291 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
292 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg5
);
293 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg6
);
296 static inline void tcg_gen_op6i_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
297 TCGv_i32 arg3
, TCGv_i32 arg4
,
298 TCGv_i32 arg5
, TCGArg arg6
)
300 *tcg_ctx
.gen_opc_ptr
++ = opc
;
301 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
302 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
303 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
304 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
305 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg5
);
306 *tcg_ctx
.gen_opparam_ptr
++ = arg6
;
309 static inline void tcg_gen_op6i_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
310 TCGv_i64 arg3
, TCGv_i64 arg4
,
311 TCGv_i64 arg5
, TCGArg arg6
)
313 *tcg_ctx
.gen_opc_ptr
++ = opc
;
314 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
315 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
316 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
317 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
318 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg5
);
319 *tcg_ctx
.gen_opparam_ptr
++ = arg6
;
322 static inline void tcg_gen_op6ii_i32(TCGOpcode opc
, TCGv_i32 arg1
,
323 TCGv_i32 arg2
, TCGv_i32 arg3
,
324 TCGv_i32 arg4
, TCGArg arg5
, TCGArg arg6
)
326 *tcg_ctx
.gen_opc_ptr
++ = opc
;
327 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
328 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
329 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
330 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
331 *tcg_ctx
.gen_opparam_ptr
++ = arg5
;
332 *tcg_ctx
.gen_opparam_ptr
++ = arg6
;
335 static inline void tcg_gen_op6ii_i64(TCGOpcode opc
, TCGv_i64 arg1
,
336 TCGv_i64 arg2
, TCGv_i64 arg3
,
337 TCGv_i64 arg4
, TCGArg arg5
, TCGArg arg6
)
339 *tcg_ctx
.gen_opc_ptr
++ = opc
;
340 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
341 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
342 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
343 *tcg_ctx
.gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
344 *tcg_ctx
.gen_opparam_ptr
++ = arg5
;
345 *tcg_ctx
.gen_opparam_ptr
++ = arg6
;
/* Append a 32-bit temporary to the parameter stream only (no opcode) —
   used when building an op's argument list incrementally.  */
static inline void tcg_add_param_i32(TCGv_i32 val)
{
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(val);
}
/* Append a 64-bit temporary to the parameter stream.  On a 32-bit host a
   64-bit value occupies two slots: low half first, then high half.
   NOTE(review): the #else/#endif lines were lost in this copy of the file;
   restored from the #if structure — confirm against upstream.  */
static inline void tcg_add_param_i64(TCGv_i64 val)
{
#if TCG_TARGET_REG_BITS == 32
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(TCGV_LOW(val));
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(TCGV_HIGH(val));
#else
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
#endif
}
/* Place label n at the current position in the opcode stream.  */
static inline void gen_set_label(int n)
{
    tcg_gen_op1i(INDEX_op_set_label, n);
}
/* Emit an unconditional branch to the given label.  */
static inline void tcg_gen_br(int label)
{
    tcg_gen_op1i(INDEX_op_br, label);
}
/* ret = arg.  Emits nothing when source and destination are the same
   temporary, since the move would be a no-op.  */
static inline void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (!TCGV_EQUAL_I32(ret, arg))
        tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
}
379 static inline void tcg_gen_movi_i32(TCGv_i32 ret
, int32_t arg
)
381 tcg_gen_op2i_i32(INDEX_op_movi_i32
, ret
, arg
);
386 static inline void tcg_gen_ld8u_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
388 tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32
, ret
, arg2
, offset
);
391 static inline void tcg_gen_ld8s_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
393 tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32
, ret
, arg2
, offset
);
396 static inline void tcg_gen_ld16u_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
398 tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32
, ret
, arg2
, offset
);
401 static inline void tcg_gen_ld16s_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
403 tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32
, ret
, arg2
, offset
);
406 static inline void tcg_gen_ld_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
408 tcg_gen_ldst_op_i32(INDEX_op_ld_i32
, ret
, arg2
, offset
);
411 static inline void tcg_gen_st8_i32(TCGv_i32 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
413 tcg_gen_ldst_op_i32(INDEX_op_st8_i32
, arg1
, arg2
, offset
);
416 static inline void tcg_gen_st16_i32(TCGv_i32 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
418 tcg_gen_ldst_op_i32(INDEX_op_st16_i32
, arg1
, arg2
, offset
);
421 static inline void tcg_gen_st_i32(TCGv_i32 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
423 tcg_gen_ldst_op_i32(INDEX_op_st_i32
, arg1
, arg2
, offset
);
426 static inline void tcg_gen_add_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
428 tcg_gen_op3_i32(INDEX_op_add_i32
, ret
, arg1
, arg2
);
/* ret = arg1 + constant arg2.  Materializes the constant in a scratch
   temporary because add takes two register operands.
   NOTE(review): the if/else lines were lost in this copy of the file;
   restored from the surviving branch bodies — confirm against upstream.  */
static inline void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
443 static inline void tcg_gen_sub_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
445 tcg_gen_op3_i32(INDEX_op_sub_i32
, ret
, arg1
, arg2
);
448 static inline void tcg_gen_subfi_i32(TCGv_i32 ret
, int32_t arg1
, TCGv_i32 arg2
)
450 TCGv_i32 t0
= tcg_const_i32(arg1
);
451 tcg_gen_sub_i32(ret
, t0
, arg2
);
452 tcg_temp_free_i32(t0
);
455 static inline void tcg_gen_subi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
457 /* some cases can be optimized here */
459 tcg_gen_mov_i32(ret
, arg1
);
461 TCGv_i32 t0
= tcg_const_i32(arg2
);
462 tcg_gen_sub_i32(ret
, arg1
, t0
);
463 tcg_temp_free_i32(t0
);
467 static inline void tcg_gen_and_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
469 if (TCGV_EQUAL_I32(arg1
, arg2
)) {
470 tcg_gen_mov_i32(ret
, arg1
);
472 tcg_gen_op3_i32(INDEX_op_and_i32
, ret
, arg1
, arg2
);
476 static inline void tcg_gen_andi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, uint32_t arg2
)
479 /* Some cases can be optimized here. */
482 tcg_gen_movi_i32(ret
, 0);
485 tcg_gen_mov_i32(ret
, arg1
);
488 /* Don't recurse with tcg_gen_ext8u_i32. */
489 if (TCG_TARGET_HAS_ext8u_i32
) {
490 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg1
);
495 if (TCG_TARGET_HAS_ext16u_i32
) {
496 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg1
);
501 t0
= tcg_const_i32(arg2
);
502 tcg_gen_and_i32(ret
, arg1
, t0
);
503 tcg_temp_free_i32(t0
);
506 static inline void tcg_gen_or_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
508 if (TCGV_EQUAL_I32(arg1
, arg2
)) {
509 tcg_gen_mov_i32(ret
, arg1
);
511 tcg_gen_op3_i32(INDEX_op_or_i32
, ret
, arg1
, arg2
);
515 static inline void tcg_gen_ori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
517 /* Some cases can be optimized here. */
519 tcg_gen_movi_i32(ret
, -1);
520 } else if (arg2
== 0) {
521 tcg_gen_mov_i32(ret
, arg1
);
523 TCGv_i32 t0
= tcg_const_i32(arg2
);
524 tcg_gen_or_i32(ret
, arg1
, t0
);
525 tcg_temp_free_i32(t0
);
529 static inline void tcg_gen_xor_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
531 if (TCGV_EQUAL_I32(arg1
, arg2
)) {
532 tcg_gen_movi_i32(ret
, 0);
534 tcg_gen_op3_i32(INDEX_op_xor_i32
, ret
, arg1
, arg2
);
538 static inline void tcg_gen_xori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
540 /* Some cases can be optimized here. */
542 tcg_gen_mov_i32(ret
, arg1
);
543 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i32
) {
544 /* Don't recurse with tcg_gen_not_i32. */
545 tcg_gen_op2_i32(INDEX_op_not_i32
, ret
, arg1
);
547 TCGv_i32 t0
= tcg_const_i32(arg2
);
548 tcg_gen_xor_i32(ret
, arg1
, t0
);
549 tcg_temp_free_i32(t0
);
553 static inline void tcg_gen_shl_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
555 tcg_gen_op3_i32(INDEX_op_shl_i32
, ret
, arg1
, arg2
);
558 static inline void tcg_gen_shli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
561 tcg_gen_mov_i32(ret
, arg1
);
563 TCGv_i32 t0
= tcg_const_i32(arg2
);
564 tcg_gen_shl_i32(ret
, arg1
, t0
);
565 tcg_temp_free_i32(t0
);
569 static inline void tcg_gen_shr_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
571 tcg_gen_op3_i32(INDEX_op_shr_i32
, ret
, arg1
, arg2
);
574 static inline void tcg_gen_shri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
577 tcg_gen_mov_i32(ret
, arg1
);
579 TCGv_i32 t0
= tcg_const_i32(arg2
);
580 tcg_gen_shr_i32(ret
, arg1
, t0
);
581 tcg_temp_free_i32(t0
);
585 static inline void tcg_gen_sar_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
587 tcg_gen_op3_i32(INDEX_op_sar_i32
, ret
, arg1
, arg2
);
590 static inline void tcg_gen_sari_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
593 tcg_gen_mov_i32(ret
, arg1
);
595 TCGv_i32 t0
= tcg_const_i32(arg2
);
596 tcg_gen_sar_i32(ret
, arg1
, t0
);
597 tcg_temp_free_i32(t0
);
/* Conditional branch to label_index when arg1 <cond> arg2.  ALWAYS folds
   to an unconditional branch; NEVER emits nothing.  */
static inline void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1,
                                      TCGv_i32 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_index);
    }
}
611 static inline void tcg_gen_brcondi_i32(TCGCond cond
, TCGv_i32 arg1
,
612 int32_t arg2
, int label_index
)
614 if (cond
== TCG_COND_ALWAYS
) {
615 tcg_gen_br(label_index
);
616 } else if (cond
!= TCG_COND_NEVER
) {
617 TCGv_i32 t0
= tcg_const_i32(arg2
);
618 tcg_gen_brcond_i32(cond
, arg1
, t0
, label_index
);
619 tcg_temp_free_i32(t0
);
623 static inline void tcg_gen_setcond_i32(TCGCond cond
, TCGv_i32 ret
,
624 TCGv_i32 arg1
, TCGv_i32 arg2
)
626 if (cond
== TCG_COND_ALWAYS
) {
627 tcg_gen_movi_i32(ret
, 1);
628 } else if (cond
== TCG_COND_NEVER
) {
629 tcg_gen_movi_i32(ret
, 0);
631 tcg_gen_op4i_i32(INDEX_op_setcond_i32
, ret
, arg1
, arg2
, cond
);
635 static inline void tcg_gen_setcondi_i32(TCGCond cond
, TCGv_i32 ret
,
636 TCGv_i32 arg1
, int32_t arg2
)
638 if (cond
== TCG_COND_ALWAYS
) {
639 tcg_gen_movi_i32(ret
, 1);
640 } else if (cond
== TCG_COND_NEVER
) {
641 tcg_gen_movi_i32(ret
, 0);
643 TCGv_i32 t0
= tcg_const_i32(arg2
);
644 tcg_gen_setcond_i32(cond
, ret
, arg1
, t0
);
645 tcg_temp_free_i32(t0
);
649 static inline void tcg_gen_mul_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
651 tcg_gen_op3_i32(INDEX_op_mul_i32
, ret
, arg1
, arg2
);
654 static inline void tcg_gen_muli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
656 TCGv_i32 t0
= tcg_const_i32(arg2
);
657 tcg_gen_mul_i32(ret
, arg1
, t0
);
658 tcg_temp_free_i32(t0
);
/* ret = arg1 / arg2 (signed).  Prefers the target's native div op; falls
   back to div2 (which takes the sign-extended high half of the dividend
   in t0), and finally to a helper call.  */
static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* t0 = sign bits of arg1: the 64-bit dividend's high half.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
675 static inline void tcg_gen_rem_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
677 if (TCG_TARGET_HAS_rem_i32
) {
678 tcg_gen_op3_i32(INDEX_op_rem_i32
, ret
, arg1
, arg2
);
679 } else if (TCG_TARGET_HAS_div_i32
) {
680 TCGv_i32 t0
= tcg_temp_new_i32();
681 tcg_gen_op3_i32(INDEX_op_div_i32
, t0
, arg1
, arg2
);
682 tcg_gen_mul_i32(t0
, t0
, arg2
);
683 tcg_gen_sub_i32(ret
, arg1
, t0
);
684 tcg_temp_free_i32(t0
);
685 } else if (TCG_TARGET_HAS_div2_i32
) {
686 TCGv_i32 t0
= tcg_temp_new_i32();
687 tcg_gen_sari_i32(t0
, arg1
, 31);
688 tcg_gen_op5_i32(INDEX_op_div2_i32
, t0
, ret
, arg1
, t0
, arg2
);
689 tcg_temp_free_i32(t0
);
691 gen_helper_rem_i32(ret
, arg1
, arg2
);
695 static inline void tcg_gen_divu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
697 if (TCG_TARGET_HAS_div_i32
) {
698 tcg_gen_op3_i32(INDEX_op_divu_i32
, ret
, arg1
, arg2
);
699 } else if (TCG_TARGET_HAS_div2_i32
) {
700 TCGv_i32 t0
= tcg_temp_new_i32();
701 tcg_gen_movi_i32(t0
, 0);
702 tcg_gen_op5_i32(INDEX_op_divu2_i32
, ret
, t0
, arg1
, t0
, arg2
);
703 tcg_temp_free_i32(t0
);
705 gen_helper_divu_i32(ret
, arg1
, arg2
);
709 static inline void tcg_gen_remu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
711 if (TCG_TARGET_HAS_rem_i32
) {
712 tcg_gen_op3_i32(INDEX_op_remu_i32
, ret
, arg1
, arg2
);
713 } else if (TCG_TARGET_HAS_div_i32
) {
714 TCGv_i32 t0
= tcg_temp_new_i32();
715 tcg_gen_op3_i32(INDEX_op_divu_i32
, t0
, arg1
, arg2
);
716 tcg_gen_mul_i32(t0
, t0
, arg2
);
717 tcg_gen_sub_i32(ret
, arg1
, t0
);
718 tcg_temp_free_i32(t0
);
719 } else if (TCG_TARGET_HAS_div2_i32
) {
720 TCGv_i32 t0
= tcg_temp_new_i32();
721 tcg_gen_movi_i32(t0
, 0);
722 tcg_gen_op5_i32(INDEX_op_divu2_i32
, t0
, ret
, arg1
, t0
, arg2
);
723 tcg_temp_free_i32(t0
);
725 gen_helper_remu_i32(ret
, arg1
, arg2
);
729 #if TCG_TARGET_REG_BITS == 32
731 static inline void tcg_gen_mov_i64(TCGv_i64 ret
, TCGv_i64 arg
)
733 if (!TCGV_EQUAL_I64(ret
, arg
)) {
734 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
735 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
739 static inline void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
741 tcg_gen_movi_i32(TCGV_LOW(ret
), arg
);
742 tcg_gen_movi_i32(TCGV_HIGH(ret
), arg
>> 32);
745 static inline void tcg_gen_ld8u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
746 tcg_target_long offset
)
748 tcg_gen_ld8u_i32(TCGV_LOW(ret
), arg2
, offset
);
749 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
752 static inline void tcg_gen_ld8s_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
753 tcg_target_long offset
)
755 tcg_gen_ld8s_i32(TCGV_LOW(ret
), arg2
, offset
);
756 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(ret
), 31);
759 static inline void tcg_gen_ld16u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
760 tcg_target_long offset
)
762 tcg_gen_ld16u_i32(TCGV_LOW(ret
), arg2
, offset
);
763 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
/* 32-bit host: load a sign-extended 16-bit value into a 64-bit
   destination.  The high half replicates the sign bit of the freshly
   loaded low half.  */
static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
773 static inline void tcg_gen_ld32u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
774 tcg_target_long offset
)
776 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
777 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
780 static inline void tcg_gen_ld32s_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
781 tcg_target_long offset
)
783 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
784 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
787 static inline void tcg_gen_ld_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
788 tcg_target_long offset
)
790 /* since arg2 and ret have different types, they cannot be the
792 #ifdef HOST_WORDS_BIGENDIAN
793 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
);
794 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
+ 4);
796 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
797 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
+ 4);
801 static inline void tcg_gen_st8_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
802 tcg_target_long offset
)
804 tcg_gen_st8_i32(TCGV_LOW(arg1
), arg2
, offset
);
807 static inline void tcg_gen_st16_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
808 tcg_target_long offset
)
810 tcg_gen_st16_i32(TCGV_LOW(arg1
), arg2
, offset
);
813 static inline void tcg_gen_st32_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
814 tcg_target_long offset
)
816 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
819 static inline void tcg_gen_st_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
820 tcg_target_long offset
)
822 #ifdef HOST_WORDS_BIGENDIAN
823 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
);
824 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
+ 4);
826 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
827 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
+ 4);
/* 32-bit host: 64-bit add via the add2 op on the low/high half pairs.
   NOTE(review): the final TCGV_HIGH(arg2) argument line was lost in this
   copy of the file; restored from the six-operand op signature — confirm
   against upstream.  */
static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op6_i32(INDEX_op_add2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
                    TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                    TCGV_HIGH(arg2));
    /* Allow the optimizer room to replace add2 with two moves.  */
    tcg_gen_op0(INDEX_op_nop);
}
840 static inline void tcg_gen_sub_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
842 tcg_gen_op6_i32(INDEX_op_sub2_i32
, TCGV_LOW(ret
), TCGV_HIGH(ret
),
843 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), TCGV_LOW(arg2
),
845 /* Allow the optimizer room to replace sub2 with two moves. */
846 tcg_gen_op0(INDEX_op_nop
);
849 static inline void tcg_gen_and_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
851 tcg_gen_and_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
852 tcg_gen_and_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
855 static inline void tcg_gen_andi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
857 tcg_gen_andi_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
858 tcg_gen_andi_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
861 static inline void tcg_gen_or_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
863 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
864 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
867 static inline void tcg_gen_ori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
869 tcg_gen_ori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
870 tcg_gen_ori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
873 static inline void tcg_gen_xor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
875 tcg_gen_xor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
876 tcg_gen_xor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
879 static inline void tcg_gen_xori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
881 tcg_gen_xori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
882 tcg_gen_xori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
885 /* XXX: use generic code when basic block handling is OK or CPU
886 specific code (x86) */
887 static inline void tcg_gen_shl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
889 gen_helper_shl_i64(ret
, arg1
, arg2
);
892 static inline void tcg_gen_shli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
894 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 0, 0);
897 static inline void tcg_gen_shr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
899 gen_helper_shr_i64(ret
, arg1
, arg2
);
902 static inline void tcg_gen_shri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
904 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 0);
907 static inline void tcg_gen_sar_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
909 gen_helper_sar_i64(ret
, arg1
, arg2
);
912 static inline void tcg_gen_sari_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
914 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 1);
917 static inline void tcg_gen_brcond_i64(TCGCond cond
, TCGv_i64 arg1
,
918 TCGv_i64 arg2
, int label_index
)
920 if (cond
== TCG_COND_ALWAYS
) {
921 tcg_gen_br(label_index
);
922 } else if (cond
!= TCG_COND_NEVER
) {
923 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
,
924 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), TCGV_LOW(arg2
),
925 TCGV_HIGH(arg2
), cond
, label_index
);
929 static inline void tcg_gen_setcond_i64(TCGCond cond
, TCGv_i64 ret
,
930 TCGv_i64 arg1
, TCGv_i64 arg2
)
932 if (cond
== TCG_COND_ALWAYS
) {
933 tcg_gen_movi_i32(TCGV_LOW(ret
), 1);
934 } else if (cond
== TCG_COND_NEVER
) {
935 tcg_gen_movi_i32(TCGV_LOW(ret
), 0);
937 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
938 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
939 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
941 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
/* 32-bit host: 64-bit multiply from 32-bit parts.
   low(arg1)*low(arg2) gives the 64-bit base product (via mulu2 or
   mul+muluh); the two cross products only affect the high half.
   Result is built in t0 so ret may alias arg1/arg2.
   NOTE(review): the t0/t1 declaration lines were lost in this copy of the
   file; restored from their assignments below — confirm against
   upstream.  */
static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, TCGV_LOW(t0), TCGV_HIGH(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
        /* Allow the optimizer room to replace mulu2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_muluh_i32);
        tcg_gen_op3_i32(INDEX_op_mul_i32, TCGV_LOW(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_op3_i32(INDEX_op_muluh_i32, TCGV_HIGH(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
    }

    /* Add the two cross products into the high half.  */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
975 static inline void tcg_gen_div_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
977 gen_helper_div_i64(ret
, arg1
, arg2
);
980 static inline void tcg_gen_rem_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
982 gen_helper_rem_i64(ret
, arg1
, arg2
);
985 static inline void tcg_gen_divu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
987 gen_helper_divu_i64(ret
, arg1
, arg2
);
990 static inline void tcg_gen_remu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
992 gen_helper_remu_i64(ret
, arg1
, arg2
);
997 static inline void tcg_gen_mov_i64(TCGv_i64 ret
, TCGv_i64 arg
)
999 if (!TCGV_EQUAL_I64(ret
, arg
))
1000 tcg_gen_op2_i64(INDEX_op_mov_i64
, ret
, arg
);
1003 static inline void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
1005 tcg_gen_op2i_i64(INDEX_op_movi_i64
, ret
, arg
);
1008 static inline void tcg_gen_ld8u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1009 tcg_target_long offset
)
1011 tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64
, ret
, arg2
, offset
);
1014 static inline void tcg_gen_ld8s_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1015 tcg_target_long offset
)
1017 tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64
, ret
, arg2
, offset
);
1020 static inline void tcg_gen_ld16u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1021 tcg_target_long offset
)
1023 tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64
, ret
, arg2
, offset
);
1026 static inline void tcg_gen_ld16s_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1027 tcg_target_long offset
)
1029 tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64
, ret
, arg2
, offset
);
1032 static inline void tcg_gen_ld32u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1033 tcg_target_long offset
)
1035 tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64
, ret
, arg2
, offset
);
1038 static inline void tcg_gen_ld32s_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1039 tcg_target_long offset
)
1041 tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64
, ret
, arg2
, offset
);
1044 static inline void tcg_gen_ld_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1046 tcg_gen_ldst_op_i64(INDEX_op_ld_i64
, ret
, arg2
, offset
);
1049 static inline void tcg_gen_st8_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
1050 tcg_target_long offset
)
1052 tcg_gen_ldst_op_i64(INDEX_op_st8_i64
, arg1
, arg2
, offset
);
1055 static inline void tcg_gen_st16_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
1056 tcg_target_long offset
)
1058 tcg_gen_ldst_op_i64(INDEX_op_st16_i64
, arg1
, arg2
, offset
);
1061 static inline void tcg_gen_st32_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
1062 tcg_target_long offset
)
1064 tcg_gen_ldst_op_i64(INDEX_op_st32_i64
, arg1
, arg2
, offset
);
1067 static inline void tcg_gen_st_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1069 tcg_gen_ldst_op_i64(INDEX_op_st_i64
, arg1
, arg2
, offset
);
1072 static inline void tcg_gen_add_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1074 tcg_gen_op3_i64(INDEX_op_add_i64
, ret
, arg1
, arg2
);
1077 static inline void tcg_gen_sub_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1079 tcg_gen_op3_i64(INDEX_op_sub_i64
, ret
, arg1
, arg2
);
1082 static inline void tcg_gen_and_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1084 if (TCGV_EQUAL_I64(arg1
, arg2
)) {
1085 tcg_gen_mov_i64(ret
, arg1
);
1087 tcg_gen_op3_i64(INDEX_op_and_i64
, ret
, arg1
, arg2
);
1091 static inline void tcg_gen_andi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, uint64_t arg2
)
1094 /* Some cases can be optimized here. */
1097 tcg_gen_movi_i64(ret
, 0);
1099 case 0xffffffffffffffffull
:
1100 tcg_gen_mov_i64(ret
, arg1
);
1103 /* Don't recurse with tcg_gen_ext8u_i32. */
1104 if (TCG_TARGET_HAS_ext8u_i64
) {
1105 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg1
);
1110 if (TCG_TARGET_HAS_ext16u_i64
) {
1111 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg1
);
1116 if (TCG_TARGET_HAS_ext32u_i64
) {
1117 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg1
);
1122 t0
= tcg_const_i64(arg2
);
1123 tcg_gen_and_i64(ret
, arg1
, t0
);
1124 tcg_temp_free_i64(t0
);
1127 static inline void tcg_gen_or_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1129 if (TCGV_EQUAL_I64(arg1
, arg2
)) {
1130 tcg_gen_mov_i64(ret
, arg1
);
1132 tcg_gen_op3_i64(INDEX_op_or_i64
, ret
, arg1
, arg2
);
1136 static inline void tcg_gen_ori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1138 /* Some cases can be optimized here. */
1140 tcg_gen_movi_i64(ret
, -1);
1141 } else if (arg2
== 0) {
1142 tcg_gen_mov_i64(ret
, arg1
);
1144 TCGv_i64 t0
= tcg_const_i64(arg2
);
1145 tcg_gen_or_i64(ret
, arg1
, t0
);
1146 tcg_temp_free_i64(t0
);
1150 static inline void tcg_gen_xor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1152 if (TCGV_EQUAL_I64(arg1
, arg2
)) {
1153 tcg_gen_movi_i64(ret
, 0);
1155 tcg_gen_op3_i64(INDEX_op_xor_i64
, ret
, arg1
, arg2
);
1159 static inline void tcg_gen_xori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1161 /* Some cases can be optimized here. */
1163 tcg_gen_mov_i64(ret
, arg1
);
1164 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i64
) {
1165 /* Don't recurse with tcg_gen_not_i64. */
1166 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg1
);
1168 TCGv_i64 t0
= tcg_const_i64(arg2
);
1169 tcg_gen_xor_i64(ret
, arg1
, t0
);
1170 tcg_temp_free_i64(t0
);
1174 static inline void tcg_gen_shl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1176 tcg_gen_op3_i64(INDEX_op_shl_i64
, ret
, arg1
, arg2
);
1179 static inline void tcg_gen_shli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1182 tcg_gen_mov_i64(ret
, arg1
);
1184 TCGv_i64 t0
= tcg_const_i64(arg2
);
1185 tcg_gen_shl_i64(ret
, arg1
, t0
);
1186 tcg_temp_free_i64(t0
);
1190 static inline void tcg_gen_shr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1192 tcg_gen_op3_i64(INDEX_op_shr_i64
, ret
, arg1
, arg2
);
1195 static inline void tcg_gen_shri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1198 tcg_gen_mov_i64(ret
, arg1
);
1200 TCGv_i64 t0
= tcg_const_i64(arg2
);
1201 tcg_gen_shr_i64(ret
, arg1
, t0
);
1202 tcg_temp_free_i64(t0
);
1206 static inline void tcg_gen_sar_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1208 tcg_gen_op3_i64(INDEX_op_sar_i64
, ret
, arg1
, arg2
);
1211 static inline void tcg_gen_sari_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1214 tcg_gen_mov_i64(ret
, arg1
);
1216 TCGv_i64 t0
= tcg_const_i64(arg2
);
1217 tcg_gen_sar_i64(ret
, arg1
, t0
);
1218 tcg_temp_free_i64(t0
);
1222 static inline void tcg_gen_brcond_i64(TCGCond cond
, TCGv_i64 arg1
,
1223 TCGv_i64 arg2
, int label_index
)
1225 if (cond
== TCG_COND_ALWAYS
) {
1226 tcg_gen_br(label_index
);
1227 } else if (cond
!= TCG_COND_NEVER
) {
1228 tcg_gen_op4ii_i64(INDEX_op_brcond_i64
, arg1
, arg2
, cond
, label_index
);
1232 static inline void tcg_gen_setcond_i64(TCGCond cond
, TCGv_i64 ret
,
1233 TCGv_i64 arg1
, TCGv_i64 arg2
)
1235 if (cond
== TCG_COND_ALWAYS
) {
1236 tcg_gen_movi_i64(ret
, 1);
1237 } else if (cond
== TCG_COND_NEVER
) {
1238 tcg_gen_movi_i64(ret
, 0);
1240 tcg_gen_op4i_i64(INDEX_op_setcond_i64
, ret
, arg1
, arg2
, cond
);
1244 static inline void tcg_gen_mul_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1246 tcg_gen_op3_i64(INDEX_op_mul_i64
, ret
, arg1
, arg2
);
1249 static inline void tcg_gen_div_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1251 if (TCG_TARGET_HAS_div_i64
) {
1252 tcg_gen_op3_i64(INDEX_op_div_i64
, ret
, arg1
, arg2
);
1253 } else if (TCG_TARGET_HAS_div2_i64
) {
1254 TCGv_i64 t0
= tcg_temp_new_i64();
1255 tcg_gen_sari_i64(t0
, arg1
, 63);
1256 tcg_gen_op5_i64(INDEX_op_div2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1257 tcg_temp_free_i64(t0
);
1259 gen_helper_div_i64(ret
, arg1
, arg2
);
1263 static inline void tcg_gen_rem_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1265 if (TCG_TARGET_HAS_rem_i64
) {
1266 tcg_gen_op3_i64(INDEX_op_rem_i64
, ret
, arg1
, arg2
);
1267 } else if (TCG_TARGET_HAS_div_i64
) {
1268 TCGv_i64 t0
= tcg_temp_new_i64();
1269 tcg_gen_op3_i64(INDEX_op_div_i64
, t0
, arg1
, arg2
);
1270 tcg_gen_mul_i64(t0
, t0
, arg2
);
1271 tcg_gen_sub_i64(ret
, arg1
, t0
);
1272 tcg_temp_free_i64(t0
);
1273 } else if (TCG_TARGET_HAS_div2_i64
) {
1274 TCGv_i64 t0
= tcg_temp_new_i64();
1275 tcg_gen_sari_i64(t0
, arg1
, 63);
1276 tcg_gen_op5_i64(INDEX_op_div2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1277 tcg_temp_free_i64(t0
);
1279 gen_helper_rem_i64(ret
, arg1
, arg2
);
1283 static inline void tcg_gen_divu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1285 if (TCG_TARGET_HAS_div_i64
) {
1286 tcg_gen_op3_i64(INDEX_op_divu_i64
, ret
, arg1
, arg2
);
1287 } else if (TCG_TARGET_HAS_div2_i64
) {
1288 TCGv_i64 t0
= tcg_temp_new_i64();
1289 tcg_gen_movi_i64(t0
, 0);
1290 tcg_gen_op5_i64(INDEX_op_divu2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1291 tcg_temp_free_i64(t0
);
1293 gen_helper_divu_i64(ret
, arg1
, arg2
);
1297 static inline void tcg_gen_remu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1299 if (TCG_TARGET_HAS_rem_i64
) {
1300 tcg_gen_op3_i64(INDEX_op_remu_i64
, ret
, arg1
, arg2
);
1301 } else if (TCG_TARGET_HAS_div_i64
) {
1302 TCGv_i64 t0
= tcg_temp_new_i64();
1303 tcg_gen_op3_i64(INDEX_op_divu_i64
, t0
, arg1
, arg2
);
1304 tcg_gen_mul_i64(t0
, t0
, arg2
);
1305 tcg_gen_sub_i64(ret
, arg1
, t0
);
1306 tcg_temp_free_i64(t0
);
1307 } else if (TCG_TARGET_HAS_div2_i64
) {
1308 TCGv_i64 t0
= tcg_temp_new_i64();
1309 tcg_gen_movi_i64(t0
, 0);
1310 tcg_gen_op5_i64(INDEX_op_divu2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1311 tcg_temp_free_i64(t0
);
1313 gen_helper_remu_i64(ret
, arg1
, arg2
);
1316 #endif /* TCG_TARGET_REG_BITS == 32 */
1318 static inline void tcg_gen_addi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1320 /* some cases can be optimized here */
1322 tcg_gen_mov_i64(ret
, arg1
);
1324 TCGv_i64 t0
= tcg_const_i64(arg2
);
1325 tcg_gen_add_i64(ret
, arg1
, t0
);
1326 tcg_temp_free_i64(t0
);
1330 static inline void tcg_gen_subfi_i64(TCGv_i64 ret
, int64_t arg1
, TCGv_i64 arg2
)
1332 TCGv_i64 t0
= tcg_const_i64(arg1
);
1333 tcg_gen_sub_i64(ret
, t0
, arg2
);
1334 tcg_temp_free_i64(t0
);
1337 static inline void tcg_gen_subi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1339 /* some cases can be optimized here */
1341 tcg_gen_mov_i64(ret
, arg1
);
1343 TCGv_i64 t0
= tcg_const_i64(arg2
);
1344 tcg_gen_sub_i64(ret
, arg1
, t0
);
1345 tcg_temp_free_i64(t0
);
1348 static inline void tcg_gen_brcondi_i64(TCGCond cond
, TCGv_i64 arg1
,
1349 int64_t arg2
, int label_index
)
1351 if (cond
== TCG_COND_ALWAYS
) {
1352 tcg_gen_br(label_index
);
1353 } else if (cond
!= TCG_COND_NEVER
) {
1354 TCGv_i64 t0
= tcg_const_i64(arg2
);
1355 tcg_gen_brcond_i64(cond
, arg1
, t0
, label_index
);
1356 tcg_temp_free_i64(t0
);
1360 static inline void tcg_gen_setcondi_i64(TCGCond cond
, TCGv_i64 ret
,
1361 TCGv_i64 arg1
, int64_t arg2
)
1363 TCGv_i64 t0
= tcg_const_i64(arg2
);
1364 tcg_gen_setcond_i64(cond
, ret
, arg1
, t0
);
1365 tcg_temp_free_i64(t0
);
1368 static inline void tcg_gen_muli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1370 TCGv_i64 t0
= tcg_const_i64(arg2
);
1371 tcg_gen_mul_i64(ret
, arg1
, t0
);
1372 tcg_temp_free_i64(t0
);
1376 /***************************************/
1377 /* optional operations */
1379 static inline void tcg_gen_ext8s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1381 if (TCG_TARGET_HAS_ext8s_i32
) {
1382 tcg_gen_op2_i32(INDEX_op_ext8s_i32
, ret
, arg
);
1384 tcg_gen_shli_i32(ret
, arg
, 24);
1385 tcg_gen_sari_i32(ret
, ret
, 24);
1389 static inline void tcg_gen_ext16s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1391 if (TCG_TARGET_HAS_ext16s_i32
) {
1392 tcg_gen_op2_i32(INDEX_op_ext16s_i32
, ret
, arg
);
1394 tcg_gen_shli_i32(ret
, arg
, 16);
1395 tcg_gen_sari_i32(ret
, ret
, 16);
1399 static inline void tcg_gen_ext8u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1401 if (TCG_TARGET_HAS_ext8u_i32
) {
1402 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg
);
1404 tcg_gen_andi_i32(ret
, arg
, 0xffu
);
1408 static inline void tcg_gen_ext16u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1410 if (TCG_TARGET_HAS_ext16u_i32
) {
1411 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg
);
1413 tcg_gen_andi_i32(ret
, arg
, 0xffffu
);
1417 /* Note: we assume the two high bytes are set to zero */
1418 static inline void tcg_gen_bswap16_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1420 if (TCG_TARGET_HAS_bswap16_i32
) {
1421 tcg_gen_op2_i32(INDEX_op_bswap16_i32
, ret
, arg
);
1423 TCGv_i32 t0
= tcg_temp_new_i32();
1425 tcg_gen_ext8u_i32(t0
, arg
);
1426 tcg_gen_shli_i32(t0
, t0
, 8);
1427 tcg_gen_shri_i32(ret
, arg
, 8);
1428 tcg_gen_or_i32(ret
, ret
, t0
);
1429 tcg_temp_free_i32(t0
);
1433 static inline void tcg_gen_bswap32_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1435 if (TCG_TARGET_HAS_bswap32_i32
) {
1436 tcg_gen_op2_i32(INDEX_op_bswap32_i32
, ret
, arg
);
1439 t0
= tcg_temp_new_i32();
1440 t1
= tcg_temp_new_i32();
1442 tcg_gen_shli_i32(t0
, arg
, 24);
1444 tcg_gen_andi_i32(t1
, arg
, 0x0000ff00);
1445 tcg_gen_shli_i32(t1
, t1
, 8);
1446 tcg_gen_or_i32(t0
, t0
, t1
);
1448 tcg_gen_shri_i32(t1
, arg
, 8);
1449 tcg_gen_andi_i32(t1
, t1
, 0x0000ff00);
1450 tcg_gen_or_i32(t0
, t0
, t1
);
1452 tcg_gen_shri_i32(t1
, arg
, 24);
1453 tcg_gen_or_i32(ret
, t0
, t1
);
1454 tcg_temp_free_i32(t0
);
1455 tcg_temp_free_i32(t1
);
1459 #if TCG_TARGET_REG_BITS == 32
1460 static inline void tcg_gen_ext8s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1462 tcg_gen_ext8s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1463 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1466 static inline void tcg_gen_ext16s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1468 tcg_gen_ext16s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1469 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1472 static inline void tcg_gen_ext32s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1474 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1475 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1478 static inline void tcg_gen_ext8u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1480 tcg_gen_ext8u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1481 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1484 static inline void tcg_gen_ext16u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1486 tcg_gen_ext16u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1487 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1490 static inline void tcg_gen_ext32u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1492 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1493 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1496 static inline void tcg_gen_trunc_shr_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
,
1499 tcg_debug_assert(count
< 64);
1501 tcg_gen_shri_i32(ret
, TCGV_HIGH(arg
), count
- 32);
1502 } else if (count
== 0) {
1503 tcg_gen_mov_i32(ret
, TCGV_LOW(arg
));
1505 TCGv_i64 t
= tcg_temp_new_i64();
1506 tcg_gen_shri_i64(t
, arg
, count
);
1507 tcg_gen_mov_i32(ret
, TCGV_LOW(t
));
1508 tcg_temp_free_i64(t
);
1512 static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
1514 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
1515 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1518 static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
1520 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
1521 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1524 /* Note: we assume the six high bytes are set to zero */
1525 static inline void tcg_gen_bswap16_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1527 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1528 tcg_gen_bswap16_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1531 /* Note: we assume the four high bytes are set to zero */
1532 static inline void tcg_gen_bswap32_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1534 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1535 tcg_gen_bswap32_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1538 static inline void tcg_gen_bswap64_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1541 t0
= tcg_temp_new_i32();
1542 t1
= tcg_temp_new_i32();
1544 tcg_gen_bswap32_i32(t0
, TCGV_LOW(arg
));
1545 tcg_gen_bswap32_i32(t1
, TCGV_HIGH(arg
));
1546 tcg_gen_mov_i32(TCGV_LOW(ret
), t1
);
1547 tcg_gen_mov_i32(TCGV_HIGH(ret
), t0
);
1548 tcg_temp_free_i32(t0
);
1549 tcg_temp_free_i32(t1
);
1553 static inline void tcg_gen_ext8s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1555 if (TCG_TARGET_HAS_ext8s_i64
) {
1556 tcg_gen_op2_i64(INDEX_op_ext8s_i64
, ret
, arg
);
1558 tcg_gen_shli_i64(ret
, arg
, 56);
1559 tcg_gen_sari_i64(ret
, ret
, 56);
1563 static inline void tcg_gen_ext16s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1565 if (TCG_TARGET_HAS_ext16s_i64
) {
1566 tcg_gen_op2_i64(INDEX_op_ext16s_i64
, ret
, arg
);
1568 tcg_gen_shli_i64(ret
, arg
, 48);
1569 tcg_gen_sari_i64(ret
, ret
, 48);
1573 static inline void tcg_gen_ext32s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1575 if (TCG_TARGET_HAS_ext32s_i64
) {
1576 tcg_gen_op2_i64(INDEX_op_ext32s_i64
, ret
, arg
);
1578 tcg_gen_shli_i64(ret
, arg
, 32);
1579 tcg_gen_sari_i64(ret
, ret
, 32);
1583 static inline void tcg_gen_ext8u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1585 if (TCG_TARGET_HAS_ext8u_i64
) {
1586 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg
);
1588 tcg_gen_andi_i64(ret
, arg
, 0xffu
);
1592 static inline void tcg_gen_ext16u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1594 if (TCG_TARGET_HAS_ext16u_i64
) {
1595 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg
);
1597 tcg_gen_andi_i64(ret
, arg
, 0xffffu
);
1601 static inline void tcg_gen_ext32u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1603 if (TCG_TARGET_HAS_ext32u_i64
) {
1604 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg
);
1606 tcg_gen_andi_i64(ret
, arg
, 0xffffffffu
);
1610 static inline void tcg_gen_trunc_shr_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
,
1613 tcg_debug_assert(count
< 64);
1614 if (TCG_TARGET_HAS_trunc_shr_i32
) {
1615 tcg_gen_op3i_i32(INDEX_op_trunc_shr_i32
, ret
,
1616 MAKE_TCGV_I32(GET_TCGV_I64(arg
)), count
);
1617 } else if (count
== 0) {
1618 tcg_gen_mov_i32(ret
, MAKE_TCGV_I32(GET_TCGV_I64(arg
)));
1620 TCGv_i64 t
= tcg_temp_new_i64();
1621 tcg_gen_shri_i64(t
, arg
, count
);
1622 tcg_gen_mov_i32(ret
, MAKE_TCGV_I32(GET_TCGV_I64(t
)));
1623 tcg_temp_free_i64(t
);
1627 /* Note: we assume the target supports move between 32 and 64 bit
1629 static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
1631 tcg_gen_ext32u_i64(ret
, MAKE_TCGV_I64(GET_TCGV_I32(arg
)));
1634 /* Note: we assume the target supports move between 32 and 64 bit
1636 static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
1638 tcg_gen_ext32s_i64(ret
, MAKE_TCGV_I64(GET_TCGV_I32(arg
)));
1641 /* Note: we assume the six high bytes are set to zero */
1642 static inline void tcg_gen_bswap16_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1644 if (TCG_TARGET_HAS_bswap16_i64
) {
1645 tcg_gen_op2_i64(INDEX_op_bswap16_i64
, ret
, arg
);
1647 TCGv_i64 t0
= tcg_temp_new_i64();
1649 tcg_gen_ext8u_i64(t0
, arg
);
1650 tcg_gen_shli_i64(t0
, t0
, 8);
1651 tcg_gen_shri_i64(ret
, arg
, 8);
1652 tcg_gen_or_i64(ret
, ret
, t0
);
1653 tcg_temp_free_i64(t0
);
1657 /* Note: we assume the four high bytes are set to zero */
1658 static inline void tcg_gen_bswap32_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1660 if (TCG_TARGET_HAS_bswap32_i64
) {
1661 tcg_gen_op2_i64(INDEX_op_bswap32_i64
, ret
, arg
);
1664 t0
= tcg_temp_new_i64();
1665 t1
= tcg_temp_new_i64();
1667 tcg_gen_shli_i64(t0
, arg
, 24);
1668 tcg_gen_ext32u_i64(t0
, t0
);
1670 tcg_gen_andi_i64(t1
, arg
, 0x0000ff00);
1671 tcg_gen_shli_i64(t1
, t1
, 8);
1672 tcg_gen_or_i64(t0
, t0
, t1
);
1674 tcg_gen_shri_i64(t1
, arg
, 8);
1675 tcg_gen_andi_i64(t1
, t1
, 0x0000ff00);
1676 tcg_gen_or_i64(t0
, t0
, t1
);
1678 tcg_gen_shri_i64(t1
, arg
, 24);
1679 tcg_gen_or_i64(ret
, t0
, t1
);
1680 tcg_temp_free_i64(t0
);
1681 tcg_temp_free_i64(t1
);
1685 static inline void tcg_gen_bswap64_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1687 if (TCG_TARGET_HAS_bswap64_i64
) {
1688 tcg_gen_op2_i64(INDEX_op_bswap64_i64
, ret
, arg
);
1690 TCGv_i64 t0
= tcg_temp_new_i64();
1691 TCGv_i64 t1
= tcg_temp_new_i64();
1693 tcg_gen_shli_i64(t0
, arg
, 56);
1695 tcg_gen_andi_i64(t1
, arg
, 0x0000ff00);
1696 tcg_gen_shli_i64(t1
, t1
, 40);
1697 tcg_gen_or_i64(t0
, t0
, t1
);
1699 tcg_gen_andi_i64(t1
, arg
, 0x00ff0000);
1700 tcg_gen_shli_i64(t1
, t1
, 24);
1701 tcg_gen_or_i64(t0
, t0
, t1
);
1703 tcg_gen_andi_i64(t1
, arg
, 0xff000000);
1704 tcg_gen_shli_i64(t1
, t1
, 8);
1705 tcg_gen_or_i64(t0
, t0
, t1
);
1707 tcg_gen_shri_i64(t1
, arg
, 8);
1708 tcg_gen_andi_i64(t1
, t1
, 0xff000000);
1709 tcg_gen_or_i64(t0
, t0
, t1
);
1711 tcg_gen_shri_i64(t1
, arg
, 24);
1712 tcg_gen_andi_i64(t1
, t1
, 0x00ff0000);
1713 tcg_gen_or_i64(t0
, t0
, t1
);
1715 tcg_gen_shri_i64(t1
, arg
, 40);
1716 tcg_gen_andi_i64(t1
, t1
, 0x0000ff00);
1717 tcg_gen_or_i64(t0
, t0
, t1
);
1719 tcg_gen_shri_i64(t1
, arg
, 56);
1720 tcg_gen_or_i64(ret
, t0
, t1
);
1721 tcg_temp_free_i64(t0
);
1722 tcg_temp_free_i64(t1
);
1728 static inline void tcg_gen_neg_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1730 if (TCG_TARGET_HAS_neg_i32
) {
1731 tcg_gen_op2_i32(INDEX_op_neg_i32
, ret
, arg
);
1733 TCGv_i32 t0
= tcg_const_i32(0);
1734 tcg_gen_sub_i32(ret
, t0
, arg
);
1735 tcg_temp_free_i32(t0
);
1739 static inline void tcg_gen_neg_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1741 if (TCG_TARGET_HAS_neg_i64
) {
1742 tcg_gen_op2_i64(INDEX_op_neg_i64
, ret
, arg
);
1744 TCGv_i64 t0
= tcg_const_i64(0);
1745 tcg_gen_sub_i64(ret
, t0
, arg
);
1746 tcg_temp_free_i64(t0
);
1750 static inline void tcg_gen_not_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1752 if (TCG_TARGET_HAS_not_i32
) {
1753 tcg_gen_op2_i32(INDEX_op_not_i32
, ret
, arg
);
1755 tcg_gen_xori_i32(ret
, arg
, -1);
1759 static inline void tcg_gen_not_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1761 #if TCG_TARGET_REG_BITS == 64
1762 if (TCG_TARGET_HAS_not_i64
) {
1763 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg
);
1765 tcg_gen_xori_i64(ret
, arg
, -1);
1768 tcg_gen_not_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1769 tcg_gen_not_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1773 static inline void tcg_gen_discard_i32(TCGv_i32 arg
)
1775 tcg_gen_op1_i32(INDEX_op_discard
, arg
);
1778 static inline void tcg_gen_discard_i64(TCGv_i64 arg
)
1780 #if TCG_TARGET_REG_BITS == 32
1781 tcg_gen_discard_i32(TCGV_LOW(arg
));
1782 tcg_gen_discard_i32(TCGV_HIGH(arg
));
1784 tcg_gen_op1_i64(INDEX_op_discard
, arg
);
1788 static inline void tcg_gen_andc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1790 if (TCG_TARGET_HAS_andc_i32
) {
1791 tcg_gen_op3_i32(INDEX_op_andc_i32
, ret
, arg1
, arg2
);
1793 TCGv_i32 t0
= tcg_temp_new_i32();
1794 tcg_gen_not_i32(t0
, arg2
);
1795 tcg_gen_and_i32(ret
, arg1
, t0
);
1796 tcg_temp_free_i32(t0
);
1800 static inline void tcg_gen_andc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1802 #if TCG_TARGET_REG_BITS == 64
1803 if (TCG_TARGET_HAS_andc_i64
) {
1804 tcg_gen_op3_i64(INDEX_op_andc_i64
, ret
, arg1
, arg2
);
1806 TCGv_i64 t0
= tcg_temp_new_i64();
1807 tcg_gen_not_i64(t0
, arg2
);
1808 tcg_gen_and_i64(ret
, arg1
, t0
);
1809 tcg_temp_free_i64(t0
);
1812 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1813 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1817 static inline void tcg_gen_eqv_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1819 if (TCG_TARGET_HAS_eqv_i32
) {
1820 tcg_gen_op3_i32(INDEX_op_eqv_i32
, ret
, arg1
, arg2
);
1822 tcg_gen_xor_i32(ret
, arg1
, arg2
);
1823 tcg_gen_not_i32(ret
, ret
);
1827 static inline void tcg_gen_eqv_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1829 #if TCG_TARGET_REG_BITS == 64
1830 if (TCG_TARGET_HAS_eqv_i64
) {
1831 tcg_gen_op3_i64(INDEX_op_eqv_i64
, ret
, arg1
, arg2
);
1833 tcg_gen_xor_i64(ret
, arg1
, arg2
);
1834 tcg_gen_not_i64(ret
, ret
);
1837 tcg_gen_eqv_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1838 tcg_gen_eqv_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1842 static inline void tcg_gen_nand_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1844 if (TCG_TARGET_HAS_nand_i32
) {
1845 tcg_gen_op3_i32(INDEX_op_nand_i32
, ret
, arg1
, arg2
);
1847 tcg_gen_and_i32(ret
, arg1
, arg2
);
1848 tcg_gen_not_i32(ret
, ret
);
1852 static inline void tcg_gen_nand_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1854 #if TCG_TARGET_REG_BITS == 64
1855 if (TCG_TARGET_HAS_nand_i64
) {
1856 tcg_gen_op3_i64(INDEX_op_nand_i64
, ret
, arg1
, arg2
);
1858 tcg_gen_and_i64(ret
, arg1
, arg2
);
1859 tcg_gen_not_i64(ret
, ret
);
1862 tcg_gen_nand_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1863 tcg_gen_nand_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1867 static inline void tcg_gen_nor_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1869 if (TCG_TARGET_HAS_nor_i32
) {
1870 tcg_gen_op3_i32(INDEX_op_nor_i32
, ret
, arg1
, arg2
);
1872 tcg_gen_or_i32(ret
, arg1
, arg2
);
1873 tcg_gen_not_i32(ret
, ret
);
1877 static inline void tcg_gen_nor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1879 #if TCG_TARGET_REG_BITS == 64
1880 if (TCG_TARGET_HAS_nor_i64
) {
1881 tcg_gen_op3_i64(INDEX_op_nor_i64
, ret
, arg1
, arg2
);
1883 tcg_gen_or_i64(ret
, arg1
, arg2
);
1884 tcg_gen_not_i64(ret
, ret
);
1887 tcg_gen_nor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1888 tcg_gen_nor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1892 static inline void tcg_gen_orc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1894 if (TCG_TARGET_HAS_orc_i32
) {
1895 tcg_gen_op3_i32(INDEX_op_orc_i32
, ret
, arg1
, arg2
);
1897 TCGv_i32 t0
= tcg_temp_new_i32();
1898 tcg_gen_not_i32(t0
, arg2
);
1899 tcg_gen_or_i32(ret
, arg1
, t0
);
1900 tcg_temp_free_i32(t0
);
1904 static inline void tcg_gen_orc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1906 #if TCG_TARGET_REG_BITS == 64
1907 if (TCG_TARGET_HAS_orc_i64
) {
1908 tcg_gen_op3_i64(INDEX_op_orc_i64
, ret
, arg1
, arg2
);
1910 TCGv_i64 t0
= tcg_temp_new_i64();
1911 tcg_gen_not_i64(t0
, arg2
);
1912 tcg_gen_or_i64(ret
, arg1
, t0
);
1913 tcg_temp_free_i64(t0
);
1916 tcg_gen_orc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1917 tcg_gen_orc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1921 static inline void tcg_gen_rotl_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1923 if (TCG_TARGET_HAS_rot_i32
) {
1924 tcg_gen_op3_i32(INDEX_op_rotl_i32
, ret
, arg1
, arg2
);
1928 t0
= tcg_temp_new_i32();
1929 t1
= tcg_temp_new_i32();
1930 tcg_gen_shl_i32(t0
, arg1
, arg2
);
1931 tcg_gen_subfi_i32(t1
, 32, arg2
);
1932 tcg_gen_shr_i32(t1
, arg1
, t1
);
1933 tcg_gen_or_i32(ret
, t0
, t1
);
1934 tcg_temp_free_i32(t0
);
1935 tcg_temp_free_i32(t1
);
1939 static inline void tcg_gen_rotl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1941 if (TCG_TARGET_HAS_rot_i64
) {
1942 tcg_gen_op3_i64(INDEX_op_rotl_i64
, ret
, arg1
, arg2
);
1945 t0
= tcg_temp_new_i64();
1946 t1
= tcg_temp_new_i64();
1947 tcg_gen_shl_i64(t0
, arg1
, arg2
);
1948 tcg_gen_subfi_i64(t1
, 64, arg2
);
1949 tcg_gen_shr_i64(t1
, arg1
, t1
);
1950 tcg_gen_or_i64(ret
, t0
, t1
);
1951 tcg_temp_free_i64(t0
);
1952 tcg_temp_free_i64(t1
);
1956 static inline void tcg_gen_rotli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
1958 /* some cases can be optimized here */
1960 tcg_gen_mov_i32(ret
, arg1
);
1961 } else if (TCG_TARGET_HAS_rot_i32
) {
1962 TCGv_i32 t0
= tcg_const_i32(arg2
);
1963 tcg_gen_rotl_i32(ret
, arg1
, t0
);
1964 tcg_temp_free_i32(t0
);
1967 t0
= tcg_temp_new_i32();
1968 t1
= tcg_temp_new_i32();
1969 tcg_gen_shli_i32(t0
, arg1
, arg2
);
1970 tcg_gen_shri_i32(t1
, arg1
, 32 - arg2
);
1971 tcg_gen_or_i32(ret
, t0
, t1
);
1972 tcg_temp_free_i32(t0
);
1973 tcg_temp_free_i32(t1
);
1977 static inline void tcg_gen_rotli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1979 /* some cases can be optimized here */
1981 tcg_gen_mov_i64(ret
, arg1
);
1982 } else if (TCG_TARGET_HAS_rot_i64
) {
1983 TCGv_i64 t0
= tcg_const_i64(arg2
);
1984 tcg_gen_rotl_i64(ret
, arg1
, t0
);
1985 tcg_temp_free_i64(t0
);
1988 t0
= tcg_temp_new_i64();
1989 t1
= tcg_temp_new_i64();
1990 tcg_gen_shli_i64(t0
, arg1
, arg2
);
1991 tcg_gen_shri_i64(t1
, arg1
, 64 - arg2
);
1992 tcg_gen_or_i64(ret
, t0
, t1
);
1993 tcg_temp_free_i64(t0
);
1994 tcg_temp_free_i64(t1
);
1998 static inline void tcg_gen_rotr_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
2000 if (TCG_TARGET_HAS_rot_i32
) {
2001 tcg_gen_op3_i32(INDEX_op_rotr_i32
, ret
, arg1
, arg2
);
2005 t0
= tcg_temp_new_i32();
2006 t1
= tcg_temp_new_i32();
2007 tcg_gen_shr_i32(t0
, arg1
, arg2
);
2008 tcg_gen_subfi_i32(t1
, 32, arg2
);
2009 tcg_gen_shl_i32(t1
, arg1
, t1
);
2010 tcg_gen_or_i32(ret
, t0
, t1
);
2011 tcg_temp_free_i32(t0
);
2012 tcg_temp_free_i32(t1
);
2016 static inline void tcg_gen_rotr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2018 if (TCG_TARGET_HAS_rot_i64
) {
2019 tcg_gen_op3_i64(INDEX_op_rotr_i64
, ret
, arg1
, arg2
);
2022 t0
= tcg_temp_new_i64();
2023 t1
= tcg_temp_new_i64();
2024 tcg_gen_shr_i64(t0
, arg1
, arg2
);
2025 tcg_gen_subfi_i64(t1
, 64, arg2
);
2026 tcg_gen_shl_i64(t1
, arg1
, t1
);
2027 tcg_gen_or_i64(ret
, t0
, t1
);
2028 tcg_temp_free_i64(t0
);
2029 tcg_temp_free_i64(t1
);
2033 static inline void tcg_gen_rotri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
2035 /* some cases can be optimized here */
2037 tcg_gen_mov_i32(ret
, arg1
);
2039 tcg_gen_rotli_i32(ret
, arg1
, 32 - arg2
);
2043 static inline void tcg_gen_rotri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
2045 /* some cases can be optimized here */
2047 tcg_gen_mov_i64(ret
, arg1
);
2049 tcg_gen_rotli_i64(ret
, arg1
, 64 - arg2
);
2053 static inline void tcg_gen_deposit_i32(TCGv_i32 ret
, TCGv_i32 arg1
,
2054 TCGv_i32 arg2
, unsigned int ofs
,
2060 tcg_debug_assert(ofs
< 32);
2061 tcg_debug_assert(len
<= 32);
2062 tcg_debug_assert(ofs
+ len
<= 32);
2064 if (ofs
== 0 && len
== 32) {
2065 tcg_gen_mov_i32(ret
, arg2
);
2068 if (TCG_TARGET_HAS_deposit_i32
&& TCG_TARGET_deposit_i32_valid(ofs
, len
)) {
2069 tcg_gen_op5ii_i32(INDEX_op_deposit_i32
, ret
, arg1
, arg2
, ofs
, len
);
2073 mask
= (1u << len
) - 1;
2074 t1
= tcg_temp_new_i32();
2076 if (ofs
+ len
< 32) {
2077 tcg_gen_andi_i32(t1
, arg2
, mask
);
2078 tcg_gen_shli_i32(t1
, t1
, ofs
);
2080 tcg_gen_shli_i32(t1
, arg2
, ofs
);
2082 tcg_gen_andi_i32(ret
, arg1
, ~(mask
<< ofs
));
2083 tcg_gen_or_i32(ret
, ret
, t1
);
2085 tcg_temp_free_i32(t1
);
2088 static inline void tcg_gen_deposit_i64(TCGv_i64 ret
, TCGv_i64 arg1
,
2089 TCGv_i64 arg2
, unsigned int ofs
,
2095 tcg_debug_assert(ofs
< 64);
2096 tcg_debug_assert(len
<= 64);
2097 tcg_debug_assert(ofs
+ len
<= 64);
2099 if (ofs
== 0 && len
== 64) {
2100 tcg_gen_mov_i64(ret
, arg2
);
2103 if (TCG_TARGET_HAS_deposit_i64
&& TCG_TARGET_deposit_i64_valid(ofs
, len
)) {
2104 tcg_gen_op5ii_i64(INDEX_op_deposit_i64
, ret
, arg1
, arg2
, ofs
, len
);
2108 #if TCG_TARGET_REG_BITS == 32
2110 tcg_gen_deposit_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
),
2111 TCGV_LOW(arg2
), ofs
- 32, len
);
2112 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
2115 if (ofs
+ len
<= 32) {
2116 tcg_gen_deposit_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
),
2117 TCGV_LOW(arg2
), ofs
, len
);
2118 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
2123 mask
= (1ull << len
) - 1;
2124 t1
= tcg_temp_new_i64();
2126 if (ofs
+ len
< 64) {
2127 tcg_gen_andi_i64(t1
, arg2
, mask
);
2128 tcg_gen_shli_i64(t1
, t1
, ofs
);
2130 tcg_gen_shli_i64(t1
, arg2
, ofs
);
2132 tcg_gen_andi_i64(ret
, arg1
, ~(mask
<< ofs
));
2133 tcg_gen_or_i64(ret
, ret
, t1
);
2135 tcg_temp_free_i64(t1
);
2138 static inline void tcg_gen_concat_i32_i64(TCGv_i64 dest
, TCGv_i32 low
,
2141 #if TCG_TARGET_REG_BITS == 32
2142 tcg_gen_mov_i32(TCGV_LOW(dest
), low
);
2143 tcg_gen_mov_i32(TCGV_HIGH(dest
), high
);
2145 TCGv_i64 tmp
= tcg_temp_new_i64();
2146 /* These extensions are only needed for type correctness.
2147 We may be able to do better given target specific information. */
2148 tcg_gen_extu_i32_i64(tmp
, high
);
2149 tcg_gen_extu_i32_i64(dest
, low
);
2150 /* If deposit is available, use it. Otherwise use the extra
2151 knowledge that we have of the zero-extensions above. */
2152 if (TCG_TARGET_HAS_deposit_i64
&& TCG_TARGET_deposit_i64_valid(32, 32)) {
2153 tcg_gen_deposit_i64(dest
, dest
, tmp
, 32, 32);
2155 tcg_gen_shli_i64(tmp
, tmp
, 32);
2156 tcg_gen_or_i64(dest
, dest
, tmp
);
2158 tcg_temp_free_i64(tmp
);
2162 static inline void tcg_gen_concat32_i64(TCGv_i64 dest
, TCGv_i64 low
,
2165 tcg_gen_deposit_i64(dest
, low
, high
, 32, 32);
2168 static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
)
2170 tcg_gen_trunc_shr_i64_i32(ret
, arg
, 0);
2173 static inline void tcg_gen_extr_i64_i32(TCGv_i32 lo
, TCGv_i32 hi
, TCGv_i64 arg
)
2175 tcg_gen_trunc_shr_i64_i32(lo
, arg
, 0);
2176 tcg_gen_trunc_shr_i64_i32(hi
, arg
, 32);
2179 static inline void tcg_gen_extr32_i64(TCGv_i64 lo
, TCGv_i64 hi
, TCGv_i64 arg
)
2181 tcg_gen_ext32u_i64(lo
, arg
);
2182 tcg_gen_shri_i64(hi
, arg
, 32);
2185 static inline void tcg_gen_movcond_i32(TCGCond cond
, TCGv_i32 ret
,
2186 TCGv_i32 c1
, TCGv_i32 c2
,
2187 TCGv_i32 v1
, TCGv_i32 v2
)
2189 if (TCG_TARGET_HAS_movcond_i32
) {
2190 tcg_gen_op6i_i32(INDEX_op_movcond_i32
, ret
, c1
, c2
, v1
, v2
, cond
);
2192 TCGv_i32 t0
= tcg_temp_new_i32();
2193 TCGv_i32 t1
= tcg_temp_new_i32();
2194 tcg_gen_setcond_i32(cond
, t0
, c1
, c2
);
2195 tcg_gen_neg_i32(t0
, t0
);
2196 tcg_gen_and_i32(t1
, v1
, t0
);
2197 tcg_gen_andc_i32(ret
, v2
, t0
);
2198 tcg_gen_or_i32(ret
, ret
, t1
);
2199 tcg_temp_free_i32(t0
);
2200 tcg_temp_free_i32(t1
);
2204 static inline void tcg_gen_movcond_i64(TCGCond cond
, TCGv_i64 ret
,
2205 TCGv_i64 c1
, TCGv_i64 c2
,
2206 TCGv_i64 v1
, TCGv_i64 v2
)
2208 #if TCG_TARGET_REG_BITS == 32
2209 TCGv_i32 t0
= tcg_temp_new_i32();
2210 TCGv_i32 t1
= tcg_temp_new_i32();
2211 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, t0
,
2212 TCGV_LOW(c1
), TCGV_HIGH(c1
),
2213 TCGV_LOW(c2
), TCGV_HIGH(c2
), cond
);
2215 if (TCG_TARGET_HAS_movcond_i32
) {
2216 tcg_gen_movi_i32(t1
, 0);
2217 tcg_gen_movcond_i32(TCG_COND_NE
, TCGV_LOW(ret
), t0
, t1
,
2218 TCGV_LOW(v1
), TCGV_LOW(v2
));
2219 tcg_gen_movcond_i32(TCG_COND_NE
, TCGV_HIGH(ret
), t0
, t1
,
2220 TCGV_HIGH(v1
), TCGV_HIGH(v2
));
2222 tcg_gen_neg_i32(t0
, t0
);
2224 tcg_gen_and_i32(t1
, TCGV_LOW(v1
), t0
);
2225 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(v2
), t0
);
2226 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), t1
);
2228 tcg_gen_and_i32(t1
, TCGV_HIGH(v1
), t0
);
2229 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(v2
), t0
);
2230 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(ret
), t1
);
2232 tcg_temp_free_i32(t0
);
2233 tcg_temp_free_i32(t1
);
2235 if (TCG_TARGET_HAS_movcond_i64
) {
2236 tcg_gen_op6i_i64(INDEX_op_movcond_i64
, ret
, c1
, c2
, v1
, v2
, cond
);
2238 TCGv_i64 t0
= tcg_temp_new_i64();
2239 TCGv_i64 t1
= tcg_temp_new_i64();
2240 tcg_gen_setcond_i64(cond
, t0
, c1
, c2
);
2241 tcg_gen_neg_i64(t0
, t0
);
2242 tcg_gen_and_i64(t1
, v1
, t0
);
2243 tcg_gen_andc_i64(ret
, v2
, t0
);
2244 tcg_gen_or_i64(ret
, ret
, t1
);
2245 tcg_temp_free_i64(t0
);
2246 tcg_temp_free_i64(t1
);
2251 static inline void tcg_gen_add2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 al
,
2252 TCGv_i32 ah
, TCGv_i32 bl
, TCGv_i32 bh
)
2254 if (TCG_TARGET_HAS_add2_i32
) {
2255 tcg_gen_op6_i32(INDEX_op_add2_i32
, rl
, rh
, al
, ah
, bl
, bh
);
2256 /* Allow the optimizer room to replace add2 with two moves. */
2257 tcg_gen_op0(INDEX_op_nop
);
2259 TCGv_i64 t0
= tcg_temp_new_i64();
2260 TCGv_i64 t1
= tcg_temp_new_i64();
2261 tcg_gen_concat_i32_i64(t0
, al
, ah
);
2262 tcg_gen_concat_i32_i64(t1
, bl
, bh
);
2263 tcg_gen_add_i64(t0
, t0
, t1
);
2264 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
2265 tcg_temp_free_i64(t0
);
2266 tcg_temp_free_i64(t1
);
2270 static inline void tcg_gen_sub2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 al
,
2271 TCGv_i32 ah
, TCGv_i32 bl
, TCGv_i32 bh
)
2273 if (TCG_TARGET_HAS_sub2_i32
) {
2274 tcg_gen_op6_i32(INDEX_op_sub2_i32
, rl
, rh
, al
, ah
, bl
, bh
);
2275 /* Allow the optimizer room to replace sub2 with two moves. */
2276 tcg_gen_op0(INDEX_op_nop
);
2278 TCGv_i64 t0
= tcg_temp_new_i64();
2279 TCGv_i64 t1
= tcg_temp_new_i64();
2280 tcg_gen_concat_i32_i64(t0
, al
, ah
);
2281 tcg_gen_concat_i32_i64(t1
, bl
, bh
);
2282 tcg_gen_sub_i64(t0
, t0
, t1
);
2283 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
2284 tcg_temp_free_i64(t0
);
2285 tcg_temp_free_i64(t1
);
2289 static inline void tcg_gen_mulu2_i32(TCGv_i32 rl
, TCGv_i32 rh
,
2290 TCGv_i32 arg1
, TCGv_i32 arg2
)
2292 if (TCG_TARGET_HAS_mulu2_i32
) {
2293 tcg_gen_op4_i32(INDEX_op_mulu2_i32
, rl
, rh
, arg1
, arg2
);
2294 /* Allow the optimizer room to replace mulu2 with two moves. */
2295 tcg_gen_op0(INDEX_op_nop
);
2296 } else if (TCG_TARGET_HAS_muluh_i32
) {
2297 TCGv_i32 t
= tcg_temp_new_i32();
2298 tcg_gen_op3_i32(INDEX_op_mul_i32
, t
, arg1
, arg2
);
2299 tcg_gen_op3_i32(INDEX_op_muluh_i32
, rh
, arg1
, arg2
);
2300 tcg_gen_mov_i32(rl
, t
);
2301 tcg_temp_free_i32(t
);
2303 TCGv_i64 t0
= tcg_temp_new_i64();
2304 TCGv_i64 t1
= tcg_temp_new_i64();
2305 tcg_gen_extu_i32_i64(t0
, arg1
);
2306 tcg_gen_extu_i32_i64(t1
, arg2
);
2307 tcg_gen_mul_i64(t0
, t0
, t1
);
2308 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
2309 tcg_temp_free_i64(t0
);
2310 tcg_temp_free_i64(t1
);
2314 static inline void tcg_gen_muls2_i32(TCGv_i32 rl
, TCGv_i32 rh
,
2315 TCGv_i32 arg1
, TCGv_i32 arg2
)
2317 if (TCG_TARGET_HAS_muls2_i32
) {
2318 tcg_gen_op4_i32(INDEX_op_muls2_i32
, rl
, rh
, arg1
, arg2
);
2319 /* Allow the optimizer room to replace muls2 with two moves. */
2320 tcg_gen_op0(INDEX_op_nop
);
2321 } else if (TCG_TARGET_HAS_mulsh_i32
) {
2322 TCGv_i32 t
= tcg_temp_new_i32();
2323 tcg_gen_op3_i32(INDEX_op_mul_i32
, t
, arg1
, arg2
);
2324 tcg_gen_op3_i32(INDEX_op_mulsh_i32
, rh
, arg1
, arg2
);
2325 tcg_gen_mov_i32(rl
, t
);
2326 tcg_temp_free_i32(t
);
2327 } else if (TCG_TARGET_REG_BITS
== 32) {
2328 TCGv_i32 t0
= tcg_temp_new_i32();
2329 TCGv_i32 t1
= tcg_temp_new_i32();
2330 TCGv_i32 t2
= tcg_temp_new_i32();
2331 TCGv_i32 t3
= tcg_temp_new_i32();
2332 tcg_gen_mulu2_i32(t0
, t1
, arg1
, arg2
);
2333 /* Adjust for negative inputs. */
2334 tcg_gen_sari_i32(t2
, arg1
, 31);
2335 tcg_gen_sari_i32(t3
, arg2
, 31);
2336 tcg_gen_and_i32(t2
, t2
, arg2
);
2337 tcg_gen_and_i32(t3
, t3
, arg1
);
2338 tcg_gen_sub_i32(rh
, t1
, t2
);
2339 tcg_gen_sub_i32(rh
, rh
, t3
);
2340 tcg_gen_mov_i32(rl
, t0
);
2341 tcg_temp_free_i32(t0
);
2342 tcg_temp_free_i32(t1
);
2343 tcg_temp_free_i32(t2
);
2344 tcg_temp_free_i32(t3
);
2346 TCGv_i64 t0
= tcg_temp_new_i64();
2347 TCGv_i64 t1
= tcg_temp_new_i64();
2348 tcg_gen_ext_i32_i64(t0
, arg1
);
2349 tcg_gen_ext_i32_i64(t1
, arg2
);
2350 tcg_gen_mul_i64(t0
, t0
, t1
);
2351 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
2352 tcg_temp_free_i64(t0
);
2353 tcg_temp_free_i64(t1
);
2357 static inline void tcg_gen_add2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 al
,
2358 TCGv_i64 ah
, TCGv_i64 bl
, TCGv_i64 bh
)
2360 if (TCG_TARGET_HAS_add2_i64
) {
2361 tcg_gen_op6_i64(INDEX_op_add2_i64
, rl
, rh
, al
, ah
, bl
, bh
);
2362 /* Allow the optimizer room to replace add2 with two moves. */
2363 tcg_gen_op0(INDEX_op_nop
);
2365 TCGv_i64 t0
= tcg_temp_new_i64();
2366 TCGv_i64 t1
= tcg_temp_new_i64();
2367 tcg_gen_add_i64(t0
, al
, bl
);
2368 tcg_gen_setcond_i64(TCG_COND_LTU
, t1
, t0
, al
);
2369 tcg_gen_add_i64(rh
, ah
, bh
);
2370 tcg_gen_add_i64(rh
, rh
, t1
);
2371 tcg_gen_mov_i64(rl
, t0
);
2372 tcg_temp_free_i64(t0
);
2373 tcg_temp_free_i64(t1
);
2377 static inline void tcg_gen_sub2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 al
,
2378 TCGv_i64 ah
, TCGv_i64 bl
, TCGv_i64 bh
)
2380 if (TCG_TARGET_HAS_sub2_i64
) {
2381 tcg_gen_op6_i64(INDEX_op_sub2_i64
, rl
, rh
, al
, ah
, bl
, bh
);
2382 /* Allow the optimizer room to replace sub2 with two moves. */
2383 tcg_gen_op0(INDEX_op_nop
);
2385 TCGv_i64 t0
= tcg_temp_new_i64();
2386 TCGv_i64 t1
= tcg_temp_new_i64();
2387 tcg_gen_sub_i64(t0
, al
, bl
);
2388 tcg_gen_setcond_i64(TCG_COND_LTU
, t1
, al
, bl
);
2389 tcg_gen_sub_i64(rh
, ah
, bh
);
2390 tcg_gen_sub_i64(rh
, rh
, t1
);
2391 tcg_gen_mov_i64(rl
, t0
);
2392 tcg_temp_free_i64(t0
);
2393 tcg_temp_free_i64(t1
);
2397 static inline void tcg_gen_mulu2_i64(TCGv_i64 rl
, TCGv_i64 rh
,
2398 TCGv_i64 arg1
, TCGv_i64 arg2
)
2400 if (TCG_TARGET_HAS_mulu2_i64
) {
2401 tcg_gen_op4_i64(INDEX_op_mulu2_i64
, rl
, rh
, arg1
, arg2
);
2402 /* Allow the optimizer room to replace mulu2 with two moves. */
2403 tcg_gen_op0(INDEX_op_nop
);
2404 } else if (TCG_TARGET_HAS_muluh_i64
) {
2405 TCGv_i64 t
= tcg_temp_new_i64();
2406 tcg_gen_op3_i64(INDEX_op_mul_i64
, t
, arg1
, arg2
);
2407 tcg_gen_op3_i64(INDEX_op_muluh_i64
, rh
, arg1
, arg2
);
2408 tcg_gen_mov_i64(rl
, t
);
2409 tcg_temp_free_i64(t
);
2411 TCGv_i64 t0
= tcg_temp_new_i64();
2412 tcg_gen_mul_i64(t0
, arg1
, arg2
);
2413 gen_helper_muluh_i64(rh
, arg1
, arg2
);
2414 tcg_gen_mov_i64(rl
, t0
);
2415 tcg_temp_free_i64(t0
);
2419 static inline void tcg_gen_muls2_i64(TCGv_i64 rl
, TCGv_i64 rh
,
2420 TCGv_i64 arg1
, TCGv_i64 arg2
)
2422 if (TCG_TARGET_HAS_muls2_i64
) {
2423 tcg_gen_op4_i64(INDEX_op_muls2_i64
, rl
, rh
, arg1
, arg2
);
2424 /* Allow the optimizer room to replace muls2 with two moves. */
2425 tcg_gen_op0(INDEX_op_nop
);
2426 } else if (TCG_TARGET_HAS_mulsh_i64
) {
2427 TCGv_i64 t
= tcg_temp_new_i64();
2428 tcg_gen_op3_i64(INDEX_op_mul_i64
, t
, arg1
, arg2
);
2429 tcg_gen_op3_i64(INDEX_op_mulsh_i64
, rh
, arg1
, arg2
);
2430 tcg_gen_mov_i64(rl
, t
);
2431 tcg_temp_free_i64(t
);
2432 } else if (TCG_TARGET_HAS_mulu2_i64
|| TCG_TARGET_HAS_muluh_i64
) {
2433 TCGv_i64 t0
= tcg_temp_new_i64();
2434 TCGv_i64 t1
= tcg_temp_new_i64();
2435 TCGv_i64 t2
= tcg_temp_new_i64();
2436 TCGv_i64 t3
= tcg_temp_new_i64();
2437 tcg_gen_mulu2_i64(t0
, t1
, arg1
, arg2
);
2438 /* Adjust for negative inputs. */
2439 tcg_gen_sari_i64(t2
, arg1
, 63);
2440 tcg_gen_sari_i64(t3
, arg2
, 63);
2441 tcg_gen_and_i64(t2
, t2
, arg2
);
2442 tcg_gen_and_i64(t3
, t3
, arg1
);
2443 tcg_gen_sub_i64(rh
, t1
, t2
);
2444 tcg_gen_sub_i64(rh
, rh
, t3
);
2445 tcg_gen_mov_i64(rl
, t0
);
2446 tcg_temp_free_i64(t0
);
2447 tcg_temp_free_i64(t1
);
2448 tcg_temp_free_i64(t2
);
2449 tcg_temp_free_i64(t3
);
2451 TCGv_i64 t0
= tcg_temp_new_i64();
2452 tcg_gen_mul_i64(t0
, arg1
, arg2
);
2453 gen_helper_mulsh_i64(rh
, arg1
, arg2
);
2454 tcg_gen_mov_i64(rl
, t0
);
2455 tcg_temp_free_i64(t0
);
2459 /***************************************/
2460 /* QEMU specific operations. Their type depend on the QEMU CPU
2462 #ifndef TARGET_LONG_BITS
2463 #error must include QEMU headers
2466 #if TARGET_LONG_BITS == 32
2467 #define TCGv TCGv_i32
2468 #define tcg_temp_new() tcg_temp_new_i32()
2469 #define tcg_global_reg_new tcg_global_reg_new_i32
2470 #define tcg_global_mem_new tcg_global_mem_new_i32
2471 #define tcg_temp_local_new() tcg_temp_local_new_i32()
2472 #define tcg_temp_free tcg_temp_free_i32
2473 #define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
2474 #define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I32(x)
2475 #define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b)
2476 #define tcg_add_param_tl tcg_add_param_i32
2477 #define tcg_gen_qemu_ld_tl tcg_gen_qemu_ld_i32
2478 #define tcg_gen_qemu_st_tl tcg_gen_qemu_st_i32
2480 #define TCGv TCGv_i64
2481 #define tcg_temp_new() tcg_temp_new_i64()
2482 #define tcg_global_reg_new tcg_global_reg_new_i64
2483 #define tcg_global_mem_new tcg_global_mem_new_i64
2484 #define tcg_temp_local_new() tcg_temp_local_new_i64()
2485 #define tcg_temp_free tcg_temp_free_i64
2486 #define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
2487 #define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I64(x)
2488 #define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b)
2489 #define tcg_add_param_tl tcg_add_param_i64
2490 #define tcg_gen_qemu_ld_tl tcg_gen_qemu_ld_i64
2491 #define tcg_gen_qemu_st_tl tcg_gen_qemu_st_i64
2494 /* debug info: write the PC of the corresponding QEMU CPU instruction */
2495 static inline void tcg_gen_debug_insn_start(uint64_t pc
)
2497 /* XXX: must really use a 32 bit size for TCGArg in all cases */
2498 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
2499 tcg_gen_op2ii(INDEX_op_debug_insn_start
,
2500 (uint32_t)(pc
), (uint32_t)(pc
>> 32));
2502 tcg_gen_op1i(INDEX_op_debug_insn_start
, pc
);
2506 static inline void tcg_gen_exit_tb(uintptr_t val
)
2508 tcg_gen_op1i(INDEX_op_exit_tb
, val
);
2511 static inline void tcg_gen_goto_tb(unsigned idx
)
2513 /* We only support two chained exits. */
2514 tcg_debug_assert(idx
<= 1);
2515 #ifdef CONFIG_DEBUG_TCG
2516 /* Verify that we havn't seen this numbered exit before. */
2517 tcg_debug_assert((tcg_ctx
.goto_tb_issue_mask
& (1 << idx
)) == 0);
2518 tcg_ctx
.goto_tb_issue_mask
|= 1 << idx
;
2520 tcg_gen_op1i(INDEX_op_goto_tb
, idx
);
/* Generic guest-memory load/store ops, parameterized by a TCGMemOp
   describing size, sign and endianness.  Defined out of line.  */
void tcg_gen_qemu_ld_i32(TCGv_i32, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_st_i32(TCGv_i32, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_ld_i64(TCGv_i64, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_st_i64(TCGv_i64, TCGv, TCGArg, TCGMemOp);
2529 static inline void tcg_gen_qemu_ld8u(TCGv ret
, TCGv addr
, int mem_index
)
2531 tcg_gen_qemu_ld_tl(ret
, addr
, mem_index
, MO_UB
);
2534 static inline void tcg_gen_qemu_ld8s(TCGv ret
, TCGv addr
, int mem_index
)
2536 tcg_gen_qemu_ld_tl(ret
, addr
, mem_index
, MO_SB
);
2539 static inline void tcg_gen_qemu_ld16u(TCGv ret
, TCGv addr
, int mem_index
)
2541 tcg_gen_qemu_ld_tl(ret
, addr
, mem_index
, MO_TEUW
);
2544 static inline void tcg_gen_qemu_ld16s(TCGv ret
, TCGv addr
, int mem_index
)
2546 tcg_gen_qemu_ld_tl(ret
, addr
, mem_index
, MO_TESW
);
2549 static inline void tcg_gen_qemu_ld32u(TCGv ret
, TCGv addr
, int mem_index
)
2551 tcg_gen_qemu_ld_tl(ret
, addr
, mem_index
, MO_TEUL
);
2554 static inline void tcg_gen_qemu_ld32s(TCGv ret
, TCGv addr
, int mem_index
)
2556 tcg_gen_qemu_ld_tl(ret
, addr
, mem_index
, MO_TESL
);
2559 static inline void tcg_gen_qemu_ld64(TCGv_i64 ret
, TCGv addr
, int mem_index
)
2561 tcg_gen_qemu_ld_i64(ret
, addr
, mem_index
, MO_TEQ
);
2564 static inline void tcg_gen_qemu_st8(TCGv arg
, TCGv addr
, int mem_index
)
2566 tcg_gen_qemu_st_tl(arg
, addr
, mem_index
, MO_UB
);
2569 static inline void tcg_gen_qemu_st16(TCGv arg
, TCGv addr
, int mem_index
)
2571 tcg_gen_qemu_st_tl(arg
, addr
, mem_index
, MO_TEUW
);
2574 static inline void tcg_gen_qemu_st32(TCGv arg
, TCGv addr
, int mem_index
)
2576 tcg_gen_qemu_st_tl(arg
, addr
, mem_index
, MO_TEUL
);
2579 static inline void tcg_gen_qemu_st64(TCGv_i64 arg
, TCGv addr
, int mem_index
)
2581 tcg_gen_qemu_st_i64(arg
, addr
, mem_index
, MO_TEQ
);
/* Map every target-long ("_tl") op generator onto its 64-bit or 32-bit
   implementation, matching the TCGv selection above.  */
#if TARGET_LONG_BITS == 64
#define tcg_gen_movi_tl tcg_gen_movi_i64
#define tcg_gen_mov_tl tcg_gen_mov_i64
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i64
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i64
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i64
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i64
#define tcg_gen_ld32u_tl tcg_gen_ld32u_i64
#define tcg_gen_ld32s_tl tcg_gen_ld32s_i64
#define tcg_gen_ld_tl tcg_gen_ld_i64
#define tcg_gen_st8_tl tcg_gen_st8_i64
#define tcg_gen_st16_tl tcg_gen_st16_i64
#define tcg_gen_st32_tl tcg_gen_st32_i64
#define tcg_gen_st_tl tcg_gen_st_i64
#define tcg_gen_add_tl tcg_gen_add_i64
#define tcg_gen_addi_tl tcg_gen_addi_i64
#define tcg_gen_sub_tl tcg_gen_sub_i64
#define tcg_gen_neg_tl tcg_gen_neg_i64
#define tcg_gen_subfi_tl tcg_gen_subfi_i64
#define tcg_gen_subi_tl tcg_gen_subi_i64
#define tcg_gen_and_tl tcg_gen_and_i64
#define tcg_gen_andi_tl tcg_gen_andi_i64
#define tcg_gen_or_tl tcg_gen_or_i64
#define tcg_gen_ori_tl tcg_gen_ori_i64
#define tcg_gen_xor_tl tcg_gen_xor_i64
#define tcg_gen_xori_tl tcg_gen_xori_i64
#define tcg_gen_not_tl tcg_gen_not_i64
#define tcg_gen_shl_tl tcg_gen_shl_i64
#define tcg_gen_shli_tl tcg_gen_shli_i64
#define tcg_gen_shr_tl tcg_gen_shr_i64
#define tcg_gen_shri_tl tcg_gen_shri_i64
#define tcg_gen_sar_tl tcg_gen_sar_i64
#define tcg_gen_sari_tl tcg_gen_sari_i64
#define tcg_gen_brcond_tl tcg_gen_brcond_i64
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i64
#define tcg_gen_setcond_tl tcg_gen_setcond_i64
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i64
#define tcg_gen_mul_tl tcg_gen_mul_i64
#define tcg_gen_muli_tl tcg_gen_muli_i64
#define tcg_gen_div_tl tcg_gen_div_i64
#define tcg_gen_rem_tl tcg_gen_rem_i64
#define tcg_gen_divu_tl tcg_gen_divu_i64
#define tcg_gen_remu_tl tcg_gen_remu_i64
#define tcg_gen_discard_tl tcg_gen_discard_i64
#define tcg_gen_trunc_tl_i32 tcg_gen_trunc_i64_i32
#define tcg_gen_trunc_i64_tl tcg_gen_mov_i64
#define tcg_gen_extu_i32_tl tcg_gen_extu_i32_i64
#define tcg_gen_ext_i32_tl tcg_gen_ext_i32_i64
#define tcg_gen_extu_tl_i64 tcg_gen_mov_i64
#define tcg_gen_ext_tl_i64 tcg_gen_mov_i64
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i64
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i64
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i64
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i64
#define tcg_gen_ext32u_tl tcg_gen_ext32u_i64
#define tcg_gen_ext32s_tl tcg_gen_ext32s_i64
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i64
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i64
#define tcg_gen_bswap64_tl tcg_gen_bswap64_i64
#define tcg_gen_concat_tl_i64 tcg_gen_concat32_i64
#define tcg_gen_extr_i64_tl tcg_gen_extr32_i64
#define tcg_gen_andc_tl tcg_gen_andc_i64
#define tcg_gen_eqv_tl tcg_gen_eqv_i64
#define tcg_gen_nand_tl tcg_gen_nand_i64
#define tcg_gen_nor_tl tcg_gen_nor_i64
#define tcg_gen_orc_tl tcg_gen_orc_i64
#define tcg_gen_rotl_tl tcg_gen_rotl_i64
#define tcg_gen_rotli_tl tcg_gen_rotli_i64
#define tcg_gen_rotr_tl tcg_gen_rotr_i64
#define tcg_gen_rotri_tl tcg_gen_rotri_i64
#define tcg_gen_deposit_tl tcg_gen_deposit_i64
#define tcg_const_tl tcg_const_i64
#define tcg_const_local_tl tcg_const_local_i64
#define tcg_gen_movcond_tl tcg_gen_movcond_i64
#define tcg_gen_add2_tl tcg_gen_add2_i64
#define tcg_gen_sub2_tl tcg_gen_sub2_i64
#define tcg_gen_mulu2_tl tcg_gen_mulu2_i64
#define tcg_gen_muls2_tl tcg_gen_muls2_i64
#else
#define tcg_gen_movi_tl tcg_gen_movi_i32
#define tcg_gen_mov_tl tcg_gen_mov_i32
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i32
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i32
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i32
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i32
#define tcg_gen_ld32u_tl tcg_gen_ld_i32
#define tcg_gen_ld32s_tl tcg_gen_ld_i32
#define tcg_gen_ld_tl tcg_gen_ld_i32
#define tcg_gen_st8_tl tcg_gen_st8_i32
#define tcg_gen_st16_tl tcg_gen_st16_i32
#define tcg_gen_st32_tl tcg_gen_st_i32
#define tcg_gen_st_tl tcg_gen_st_i32
#define tcg_gen_add_tl tcg_gen_add_i32
#define tcg_gen_addi_tl tcg_gen_addi_i32
#define tcg_gen_sub_tl tcg_gen_sub_i32
#define tcg_gen_neg_tl tcg_gen_neg_i32
#define tcg_gen_subfi_tl tcg_gen_subfi_i32
#define tcg_gen_subi_tl tcg_gen_subi_i32
#define tcg_gen_and_tl tcg_gen_and_i32
#define tcg_gen_andi_tl tcg_gen_andi_i32
#define tcg_gen_or_tl tcg_gen_or_i32
#define tcg_gen_ori_tl tcg_gen_ori_i32
#define tcg_gen_xor_tl tcg_gen_xor_i32
#define tcg_gen_xori_tl tcg_gen_xori_i32
#define tcg_gen_not_tl tcg_gen_not_i32
#define tcg_gen_shl_tl tcg_gen_shl_i32
#define tcg_gen_shli_tl tcg_gen_shli_i32
#define tcg_gen_shr_tl tcg_gen_shr_i32
#define tcg_gen_shri_tl tcg_gen_shri_i32
#define tcg_gen_sar_tl tcg_gen_sar_i32
#define tcg_gen_sari_tl tcg_gen_sari_i32
#define tcg_gen_brcond_tl tcg_gen_brcond_i32
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i32
#define tcg_gen_setcond_tl tcg_gen_setcond_i32
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i32
#define tcg_gen_mul_tl tcg_gen_mul_i32
#define tcg_gen_muli_tl tcg_gen_muli_i32
#define tcg_gen_div_tl tcg_gen_div_i32
#define tcg_gen_rem_tl tcg_gen_rem_i32
#define tcg_gen_divu_tl tcg_gen_divu_i32
#define tcg_gen_remu_tl tcg_gen_remu_i32
#define tcg_gen_discard_tl tcg_gen_discard_i32
#define tcg_gen_trunc_tl_i32 tcg_gen_mov_i32
#define tcg_gen_trunc_i64_tl tcg_gen_trunc_i64_i32
#define tcg_gen_extu_i32_tl tcg_gen_mov_i32
#define tcg_gen_ext_i32_tl tcg_gen_mov_i32
#define tcg_gen_extu_tl_i64 tcg_gen_extu_i32_i64
#define tcg_gen_ext_tl_i64 tcg_gen_ext_i32_i64
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i32
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i32
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i32
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i32
#define tcg_gen_ext32u_tl tcg_gen_mov_i32
#define tcg_gen_ext32s_tl tcg_gen_mov_i32
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i32
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i32
#define tcg_gen_concat_tl_i64 tcg_gen_concat_i32_i64
#define tcg_gen_extr_i64_tl tcg_gen_extr_i64_i32
#define tcg_gen_andc_tl tcg_gen_andc_i32
#define tcg_gen_eqv_tl tcg_gen_eqv_i32
#define tcg_gen_nand_tl tcg_gen_nand_i32
#define tcg_gen_nor_tl tcg_gen_nor_i32
#define tcg_gen_orc_tl tcg_gen_orc_i32
#define tcg_gen_rotl_tl tcg_gen_rotl_i32
#define tcg_gen_rotli_tl tcg_gen_rotli_i32
#define tcg_gen_rotr_tl tcg_gen_rotr_i32
#define tcg_gen_rotri_tl tcg_gen_rotri_i32
#define tcg_gen_deposit_tl tcg_gen_deposit_i32
#define tcg_const_tl tcg_const_i32
#define tcg_const_local_tl tcg_const_local_i32
#define tcg_gen_movcond_tl tcg_gen_movcond_i32
#define tcg_gen_add2_tl tcg_gen_add2_i32
#define tcg_gen_sub2_tl tcg_gen_sub2_i32
#define tcg_gen_mulu2_tl tcg_gen_mulu2_i32
#define tcg_gen_muls2_tl tcg_gen_muls2_i32
#endif
/* Host-pointer-sized ops: route to the i32 or i64 generators depending on
   the host pointer width.  */
#if UINTPTR_MAX == UINT32_MAX
# define tcg_gen_ld_ptr(R, A, O) \
    tcg_gen_ld_i32(TCGV_PTR_TO_NAT(R), (A), (O))
# define tcg_gen_discard_ptr(A) \
    tcg_gen_discard_i32(TCGV_PTR_TO_NAT(A))
# define tcg_gen_add_ptr(R, A, B) \
    tcg_gen_add_i32(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), TCGV_PTR_TO_NAT(B))
# define tcg_gen_addi_ptr(R, A, B) \
    tcg_gen_addi_i32(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), (B))
# define tcg_gen_ext_i32_ptr(R, A) \
    tcg_gen_mov_i32(TCGV_PTR_TO_NAT(R), (A))
#else
# define tcg_gen_ld_ptr(R, A, O) \
    tcg_gen_ld_i64(TCGV_PTR_TO_NAT(R), (A), (O))
# define tcg_gen_discard_ptr(A) \
    tcg_gen_discard_i64(TCGV_PTR_TO_NAT(A))
# define tcg_gen_add_ptr(R, A, B) \
    tcg_gen_add_i64(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), TCGV_PTR_TO_NAT(B))
# define tcg_gen_addi_ptr(R, A, B) \
    tcg_gen_addi_i64(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), (B))
# define tcg_gen_ext_i32_ptr(R, A) \
    tcg_gen_ext_i32_i64(TCGV_PTR_TO_NAT(R), (A))
#endif /* UINTPTR_MAX == UINT32_MAX */