2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008 Fabrice Bellard
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
26 int gen_new_label(void);
28 static inline void tcg_gen_op1_i32(TCGOpcode opc
, TCGv_i32 arg1
)
31 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
34 static inline void tcg_gen_op1_i64(TCGOpcode opc
, TCGv_i64 arg1
)
37 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
40 static inline void tcg_gen_op1i(TCGOpcode opc
, TCGArg arg1
)
43 *gen_opparam_ptr
++ = arg1
;
46 static inline void tcg_gen_op2_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
)
49 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
50 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
53 static inline void tcg_gen_op2_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
)
56 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
57 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
60 static inline void tcg_gen_op2i_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGArg arg2
)
63 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
64 *gen_opparam_ptr
++ = arg2
;
67 static inline void tcg_gen_op2i_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGArg arg2
)
70 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
71 *gen_opparam_ptr
++ = arg2
;
74 static inline void tcg_gen_op2ii(TCGOpcode opc
, TCGArg arg1
, TCGArg arg2
)
77 *gen_opparam_ptr
++ = arg1
;
78 *gen_opparam_ptr
++ = arg2
;
81 static inline void tcg_gen_op3_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
85 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
86 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
87 *gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
90 static inline void tcg_gen_op3_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
94 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
95 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
96 *gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
99 static inline void tcg_gen_op3i_i32(TCGOpcode opc
, TCGv_i32 arg1
,
100 TCGv_i32 arg2
, TCGArg arg3
)
102 *gen_opc_ptr
++ = opc
;
103 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
104 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
105 *gen_opparam_ptr
++ = arg3
;
108 static inline void tcg_gen_op3i_i64(TCGOpcode opc
, TCGv_i64 arg1
,
109 TCGv_i64 arg2
, TCGArg arg3
)
111 *gen_opc_ptr
++ = opc
;
112 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
113 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
114 *gen_opparam_ptr
++ = arg3
;
117 static inline void tcg_gen_ldst_op_i32(TCGOpcode opc
, TCGv_i32 val
,
118 TCGv_ptr base
, TCGArg offset
)
120 *gen_opc_ptr
++ = opc
;
121 *gen_opparam_ptr
++ = GET_TCGV_I32(val
);
122 *gen_opparam_ptr
++ = GET_TCGV_PTR(base
);
123 *gen_opparam_ptr
++ = offset
;
126 static inline void tcg_gen_ldst_op_i64(TCGOpcode opc
, TCGv_i64 val
,
127 TCGv_ptr base
, TCGArg offset
)
129 *gen_opc_ptr
++ = opc
;
130 *gen_opparam_ptr
++ = GET_TCGV_I64(val
);
131 *gen_opparam_ptr
++ = GET_TCGV_PTR(base
);
132 *gen_opparam_ptr
++ = offset
;
135 static inline void tcg_gen_qemu_ldst_op_i64_i32(TCGOpcode opc
, TCGv_i64 val
,
136 TCGv_i32 addr
, TCGArg mem_index
)
138 *gen_opc_ptr
++ = opc
;
139 *gen_opparam_ptr
++ = GET_TCGV_I64(val
);
140 *gen_opparam_ptr
++ = GET_TCGV_I32(addr
);
141 *gen_opparam_ptr
++ = mem_index
;
144 static inline void tcg_gen_qemu_ldst_op_i64_i64(TCGOpcode opc
, TCGv_i64 val
,
145 TCGv_i64 addr
, TCGArg mem_index
)
147 *gen_opc_ptr
++ = opc
;
148 *gen_opparam_ptr
++ = GET_TCGV_I64(val
);
149 *gen_opparam_ptr
++ = GET_TCGV_I64(addr
);
150 *gen_opparam_ptr
++ = mem_index
;
153 static inline void tcg_gen_op4_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
154 TCGv_i32 arg3
, TCGv_i32 arg4
)
156 *gen_opc_ptr
++ = opc
;
157 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
158 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
159 *gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
160 *gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
163 static inline void tcg_gen_op4_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
164 TCGv_i64 arg3
, TCGv_i64 arg4
)
166 *gen_opc_ptr
++ = opc
;
167 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
168 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
169 *gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
170 *gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
173 static inline void tcg_gen_op4i_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
174 TCGv_i32 arg3
, TCGArg arg4
)
176 *gen_opc_ptr
++ = opc
;
177 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
178 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
179 *gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
180 *gen_opparam_ptr
++ = arg4
;
183 static inline void tcg_gen_op4i_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
184 TCGv_i64 arg3
, TCGArg arg4
)
186 *gen_opc_ptr
++ = opc
;
187 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
188 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
189 *gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
190 *gen_opparam_ptr
++ = arg4
;
193 static inline void tcg_gen_op4ii_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
194 TCGArg arg3
, TCGArg arg4
)
196 *gen_opc_ptr
++ = opc
;
197 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
198 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
199 *gen_opparam_ptr
++ = arg3
;
200 *gen_opparam_ptr
++ = arg4
;
203 static inline void tcg_gen_op4ii_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
204 TCGArg arg3
, TCGArg arg4
)
206 *gen_opc_ptr
++ = opc
;
207 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
208 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
209 *gen_opparam_ptr
++ = arg3
;
210 *gen_opparam_ptr
++ = arg4
;
213 static inline void tcg_gen_op5_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
214 TCGv_i32 arg3
, TCGv_i32 arg4
, TCGv_i32 arg5
)
216 *gen_opc_ptr
++ = opc
;
217 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
218 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
219 *gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
220 *gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
221 *gen_opparam_ptr
++ = GET_TCGV_I32(arg5
);
224 static inline void tcg_gen_op5_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
225 TCGv_i64 arg3
, TCGv_i64 arg4
, TCGv_i64 arg5
)
227 *gen_opc_ptr
++ = opc
;
228 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
229 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
230 *gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
231 *gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
232 *gen_opparam_ptr
++ = GET_TCGV_I64(arg5
);
235 static inline void tcg_gen_op5i_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
236 TCGv_i32 arg3
, TCGv_i32 arg4
, TCGArg arg5
)
238 *gen_opc_ptr
++ = opc
;
239 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
240 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
241 *gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
242 *gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
243 *gen_opparam_ptr
++ = arg5
;
246 static inline void tcg_gen_op5i_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
247 TCGv_i64 arg3
, TCGv_i64 arg4
, TCGArg arg5
)
249 *gen_opc_ptr
++ = opc
;
250 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
251 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
252 *gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
253 *gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
254 *gen_opparam_ptr
++ = arg5
;
257 static inline void tcg_gen_op5ii_i32(TCGOpcode opc
, TCGv_i32 arg1
,
258 TCGv_i32 arg2
, TCGv_i32 arg3
,
259 TCGArg arg4
, TCGArg arg5
)
261 *gen_opc_ptr
++ = opc
;
262 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
263 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
264 *gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
265 *gen_opparam_ptr
++ = arg4
;
266 *gen_opparam_ptr
++ = arg5
;
269 static inline void tcg_gen_op5ii_i64(TCGOpcode opc
, TCGv_i64 arg1
,
270 TCGv_i64 arg2
, TCGv_i64 arg3
,
271 TCGArg arg4
, TCGArg arg5
)
273 *gen_opc_ptr
++ = opc
;
274 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
275 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
276 *gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
277 *gen_opparam_ptr
++ = arg4
;
278 *gen_opparam_ptr
++ = arg5
;
281 static inline void tcg_gen_op6_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
282 TCGv_i32 arg3
, TCGv_i32 arg4
, TCGv_i32 arg5
,
285 *gen_opc_ptr
++ = opc
;
286 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
287 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
288 *gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
289 *gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
290 *gen_opparam_ptr
++ = GET_TCGV_I32(arg5
);
291 *gen_opparam_ptr
++ = GET_TCGV_I32(arg6
);
294 static inline void tcg_gen_op6_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
295 TCGv_i64 arg3
, TCGv_i64 arg4
, TCGv_i64 arg5
,
298 *gen_opc_ptr
++ = opc
;
299 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
300 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
301 *gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
302 *gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
303 *gen_opparam_ptr
++ = GET_TCGV_I64(arg5
);
304 *gen_opparam_ptr
++ = GET_TCGV_I64(arg6
);
307 static inline void tcg_gen_op6i_i32(TCGOpcode opc
, TCGv_i32 arg1
, TCGv_i32 arg2
,
308 TCGv_i32 arg3
, TCGv_i32 arg4
,
309 TCGv_i32 arg5
, TCGArg arg6
)
311 *gen_opc_ptr
++ = opc
;
312 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
313 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
314 *gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
315 *gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
316 *gen_opparam_ptr
++ = GET_TCGV_I32(arg5
);
317 *gen_opparam_ptr
++ = arg6
;
320 static inline void tcg_gen_op6i_i64(TCGOpcode opc
, TCGv_i64 arg1
, TCGv_i64 arg2
,
321 TCGv_i64 arg3
, TCGv_i64 arg4
,
322 TCGv_i64 arg5
, TCGArg arg6
)
324 *gen_opc_ptr
++ = opc
;
325 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
326 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
327 *gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
328 *gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
329 *gen_opparam_ptr
++ = GET_TCGV_I64(arg5
);
330 *gen_opparam_ptr
++ = arg6
;
333 static inline void tcg_gen_op6ii_i32(TCGOpcode opc
, TCGv_i32 arg1
,
334 TCGv_i32 arg2
, TCGv_i32 arg3
,
335 TCGv_i32 arg4
, TCGArg arg5
, TCGArg arg6
)
337 *gen_opc_ptr
++ = opc
;
338 *gen_opparam_ptr
++ = GET_TCGV_I32(arg1
);
339 *gen_opparam_ptr
++ = GET_TCGV_I32(arg2
);
340 *gen_opparam_ptr
++ = GET_TCGV_I32(arg3
);
341 *gen_opparam_ptr
++ = GET_TCGV_I32(arg4
);
342 *gen_opparam_ptr
++ = arg5
;
343 *gen_opparam_ptr
++ = arg6
;
346 static inline void tcg_gen_op6ii_i64(TCGOpcode opc
, TCGv_i64 arg1
,
347 TCGv_i64 arg2
, TCGv_i64 arg3
,
348 TCGv_i64 arg4
, TCGArg arg5
, TCGArg arg6
)
350 *gen_opc_ptr
++ = opc
;
351 *gen_opparam_ptr
++ = GET_TCGV_I64(arg1
);
352 *gen_opparam_ptr
++ = GET_TCGV_I64(arg2
);
353 *gen_opparam_ptr
++ = GET_TCGV_I64(arg3
);
354 *gen_opparam_ptr
++ = GET_TCGV_I64(arg4
);
355 *gen_opparam_ptr
++ = arg5
;
356 *gen_opparam_ptr
++ = arg6
;
359 static inline void gen_set_label(int n
)
361 tcg_gen_op1i(INDEX_op_set_label
, n
);
364 static inline void tcg_gen_br(int label
)
366 tcg_gen_op1i(INDEX_op_br
, label
);
369 static inline void tcg_gen_mov_i32(TCGv_i32 ret
, TCGv_i32 arg
)
371 if (!TCGV_EQUAL_I32(ret
, arg
))
372 tcg_gen_op2_i32(INDEX_op_mov_i32
, ret
, arg
);
375 static inline void tcg_gen_movi_i32(TCGv_i32 ret
, int32_t arg
)
377 tcg_gen_op2i_i32(INDEX_op_movi_i32
, ret
, arg
);
/* A version of dh_sizemask from def-helper.h that doesn't rely on
   preprocessor magic.  Encodes, for argument slot n (0 = return value),
   two bits: bit 2n = is_64bit, bit 2n+1 = is_signed. */
static inline int tcg_gen_sizemask(int n, int is_64bit, int is_signed)
{
    return (is_64bit << n*2) | (is_signed << (n*2 + 1));
}
388 static inline void tcg_gen_helperN(void *func
, int flags
, int sizemask
,
389 TCGArg ret
, int nargs
, TCGArg
*args
)
392 fn
= tcg_const_ptr(func
);
393 tcg_gen_callN(&tcg_ctx
, fn
, flags
, sizemask
, ret
,
395 tcg_temp_free_ptr(fn
);
398 /* Note: Both tcg_gen_helper32() and tcg_gen_helper64() are currently
399 reserved for helpers in tcg-runtime.c. These helpers are all const
400 and pure, hence the call to tcg_gen_callN() with TCG_CALL_CONST |
401 TCG_CALL_PURE. This may need to be adjusted if these functions
402 start to be used with other helpers. */
403 static inline void tcg_gen_helper32(void *func
, int sizemask
, TCGv_i32 ret
,
404 TCGv_i32 a
, TCGv_i32 b
)
408 fn
= tcg_const_ptr(func
);
409 args
[0] = GET_TCGV_I32(a
);
410 args
[1] = GET_TCGV_I32(b
);
411 tcg_gen_callN(&tcg_ctx
, fn
, TCG_CALL_CONST
| TCG_CALL_PURE
, sizemask
,
412 GET_TCGV_I32(ret
), 2, args
);
413 tcg_temp_free_ptr(fn
);
416 static inline void tcg_gen_helper64(void *func
, int sizemask
, TCGv_i64 ret
,
417 TCGv_i64 a
, TCGv_i64 b
)
421 fn
= tcg_const_ptr(func
);
422 args
[0] = GET_TCGV_I64(a
);
423 args
[1] = GET_TCGV_I64(b
);
424 tcg_gen_callN(&tcg_ctx
, fn
, TCG_CALL_CONST
| TCG_CALL_PURE
, sizemask
,
425 GET_TCGV_I64(ret
), 2, args
);
426 tcg_temp_free_ptr(fn
);
431 static inline void tcg_gen_ld8u_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
433 tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32
, ret
, arg2
, offset
);
436 static inline void tcg_gen_ld8s_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
438 tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32
, ret
, arg2
, offset
);
441 static inline void tcg_gen_ld16u_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
443 tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32
, ret
, arg2
, offset
);
446 static inline void tcg_gen_ld16s_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
448 tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32
, ret
, arg2
, offset
);
451 static inline void tcg_gen_ld_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
453 tcg_gen_ldst_op_i32(INDEX_op_ld_i32
, ret
, arg2
, offset
);
456 static inline void tcg_gen_st8_i32(TCGv_i32 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
458 tcg_gen_ldst_op_i32(INDEX_op_st8_i32
, arg1
, arg2
, offset
);
461 static inline void tcg_gen_st16_i32(TCGv_i32 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
463 tcg_gen_ldst_op_i32(INDEX_op_st16_i32
, arg1
, arg2
, offset
);
466 static inline void tcg_gen_st_i32(TCGv_i32 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
468 tcg_gen_ldst_op_i32(INDEX_op_st_i32
, arg1
, arg2
, offset
);
471 static inline void tcg_gen_add_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
473 tcg_gen_op3_i32(INDEX_op_add_i32
, ret
, arg1
, arg2
);
476 static inline void tcg_gen_addi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
478 /* some cases can be optimized here */
480 tcg_gen_mov_i32(ret
, arg1
);
482 TCGv_i32 t0
= tcg_const_i32(arg2
);
483 tcg_gen_add_i32(ret
, arg1
, t0
);
484 tcg_temp_free_i32(t0
);
488 static inline void tcg_gen_sub_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
490 tcg_gen_op3_i32(INDEX_op_sub_i32
, ret
, arg1
, arg2
);
493 static inline void tcg_gen_subfi_i32(TCGv_i32 ret
, int32_t arg1
, TCGv_i32 arg2
)
495 TCGv_i32 t0
= tcg_const_i32(arg1
);
496 tcg_gen_sub_i32(ret
, t0
, arg2
);
497 tcg_temp_free_i32(t0
);
500 static inline void tcg_gen_subi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
502 /* some cases can be optimized here */
504 tcg_gen_mov_i32(ret
, arg1
);
506 TCGv_i32 t0
= tcg_const_i32(arg2
);
507 tcg_gen_sub_i32(ret
, arg1
, t0
);
508 tcg_temp_free_i32(t0
);
512 static inline void tcg_gen_and_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
514 if (TCGV_EQUAL_I32(arg1
, arg2
)) {
515 tcg_gen_mov_i32(ret
, arg1
);
517 tcg_gen_op3_i32(INDEX_op_and_i32
, ret
, arg1
, arg2
);
521 static inline void tcg_gen_andi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, uint32_t arg2
)
524 /* Some cases can be optimized here. */
527 tcg_gen_movi_i32(ret
, 0);
530 tcg_gen_mov_i32(ret
, arg1
);
533 /* Don't recurse with tcg_gen_ext8u_i32. */
534 if (TCG_TARGET_HAS_ext8u_i32
) {
535 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg1
);
540 if (TCG_TARGET_HAS_ext16u_i32
) {
541 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg1
);
546 t0
= tcg_const_i32(arg2
);
547 tcg_gen_and_i32(ret
, arg1
, t0
);
548 tcg_temp_free_i32(t0
);
551 static inline void tcg_gen_or_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
553 if (TCGV_EQUAL_I32(arg1
, arg2
)) {
554 tcg_gen_mov_i32(ret
, arg1
);
556 tcg_gen_op3_i32(INDEX_op_or_i32
, ret
, arg1
, arg2
);
560 static inline void tcg_gen_ori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
562 /* Some cases can be optimized here. */
564 tcg_gen_movi_i32(ret
, -1);
565 } else if (arg2
== 0) {
566 tcg_gen_mov_i32(ret
, arg1
);
568 TCGv_i32 t0
= tcg_const_i32(arg2
);
569 tcg_gen_or_i32(ret
, arg1
, t0
);
570 tcg_temp_free_i32(t0
);
574 static inline void tcg_gen_xor_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
576 if (TCGV_EQUAL_I32(arg1
, arg2
)) {
577 tcg_gen_movi_i32(ret
, 0);
579 tcg_gen_op3_i32(INDEX_op_xor_i32
, ret
, arg1
, arg2
);
583 static inline void tcg_gen_xori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
585 /* Some cases can be optimized here. */
587 tcg_gen_mov_i32(ret
, arg1
);
588 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i32
) {
589 /* Don't recurse with tcg_gen_not_i32. */
590 tcg_gen_op2_i32(INDEX_op_not_i32
, ret
, arg1
);
592 TCGv_i32 t0
= tcg_const_i32(arg2
);
593 tcg_gen_xor_i32(ret
, arg1
, t0
);
594 tcg_temp_free_i32(t0
);
598 static inline void tcg_gen_shl_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
600 tcg_gen_op3_i32(INDEX_op_shl_i32
, ret
, arg1
, arg2
);
603 static inline void tcg_gen_shli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
606 tcg_gen_mov_i32(ret
, arg1
);
608 TCGv_i32 t0
= tcg_const_i32(arg2
);
609 tcg_gen_shl_i32(ret
, arg1
, t0
);
610 tcg_temp_free_i32(t0
);
614 static inline void tcg_gen_shr_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
616 tcg_gen_op3_i32(INDEX_op_shr_i32
, ret
, arg1
, arg2
);
619 static inline void tcg_gen_shri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
622 tcg_gen_mov_i32(ret
, arg1
);
624 TCGv_i32 t0
= tcg_const_i32(arg2
);
625 tcg_gen_shr_i32(ret
, arg1
, t0
);
626 tcg_temp_free_i32(t0
);
630 static inline void tcg_gen_sar_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
632 tcg_gen_op3_i32(INDEX_op_sar_i32
, ret
, arg1
, arg2
);
635 static inline void tcg_gen_sari_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
638 tcg_gen_mov_i32(ret
, arg1
);
640 TCGv_i32 t0
= tcg_const_i32(arg2
);
641 tcg_gen_sar_i32(ret
, arg1
, t0
);
642 tcg_temp_free_i32(t0
);
646 static inline void tcg_gen_brcond_i32(TCGCond cond
, TCGv_i32 arg1
,
647 TCGv_i32 arg2
, int label_index
)
649 tcg_gen_op4ii_i32(INDEX_op_brcond_i32
, arg1
, arg2
, cond
, label_index
);
652 static inline void tcg_gen_brcondi_i32(TCGCond cond
, TCGv_i32 arg1
,
653 int32_t arg2
, int label_index
)
655 TCGv_i32 t0
= tcg_const_i32(arg2
);
656 tcg_gen_brcond_i32(cond
, arg1
, t0
, label_index
);
657 tcg_temp_free_i32(t0
);
660 static inline void tcg_gen_setcond_i32(TCGCond cond
, TCGv_i32 ret
,
661 TCGv_i32 arg1
, TCGv_i32 arg2
)
663 tcg_gen_op4i_i32(INDEX_op_setcond_i32
, ret
, arg1
, arg2
, cond
);
666 static inline void tcg_gen_setcondi_i32(TCGCond cond
, TCGv_i32 ret
,
667 TCGv_i32 arg1
, int32_t arg2
)
669 TCGv_i32 t0
= tcg_const_i32(arg2
);
670 tcg_gen_setcond_i32(cond
, ret
, arg1
, t0
);
671 tcg_temp_free_i32(t0
);
674 static inline void tcg_gen_mul_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
676 tcg_gen_op3_i32(INDEX_op_mul_i32
, ret
, arg1
, arg2
);
679 static inline void tcg_gen_muli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
681 TCGv_i32 t0
= tcg_const_i32(arg2
);
682 tcg_gen_mul_i32(ret
, arg1
, t0
);
683 tcg_temp_free_i32(t0
);
686 static inline void tcg_gen_div_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
688 if (TCG_TARGET_HAS_div_i32
) {
689 tcg_gen_op3_i32(INDEX_op_div_i32
, ret
, arg1
, arg2
);
690 } else if (TCG_TARGET_HAS_div2_i32
) {
691 TCGv_i32 t0
= tcg_temp_new_i32();
692 tcg_gen_sari_i32(t0
, arg1
, 31);
693 tcg_gen_op5_i32(INDEX_op_div2_i32
, ret
, t0
, arg1
, t0
, arg2
);
694 tcg_temp_free_i32(t0
);
697 /* Return value and both arguments are 32-bit and signed. */
698 sizemask
|= tcg_gen_sizemask(0, 0, 1);
699 sizemask
|= tcg_gen_sizemask(1, 0, 1);
700 sizemask
|= tcg_gen_sizemask(2, 0, 1);
701 tcg_gen_helper32(tcg_helper_div_i32
, sizemask
, ret
, arg1
, arg2
);
705 static inline void tcg_gen_rem_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
707 if (TCG_TARGET_HAS_div_i32
) {
708 tcg_gen_op3_i32(INDEX_op_rem_i32
, ret
, arg1
, arg2
);
709 } else if (TCG_TARGET_HAS_div2_i32
) {
710 TCGv_i32 t0
= tcg_temp_new_i32();
711 tcg_gen_sari_i32(t0
, arg1
, 31);
712 tcg_gen_op5_i32(INDEX_op_div2_i32
, t0
, ret
, arg1
, t0
, arg2
);
713 tcg_temp_free_i32(t0
);
716 /* Return value and both arguments are 32-bit and signed. */
717 sizemask
|= tcg_gen_sizemask(0, 0, 1);
718 sizemask
|= tcg_gen_sizemask(1, 0, 1);
719 sizemask
|= tcg_gen_sizemask(2, 0, 1);
720 tcg_gen_helper32(tcg_helper_rem_i32
, sizemask
, ret
, arg1
, arg2
);
724 static inline void tcg_gen_divu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
726 if (TCG_TARGET_HAS_div_i32
) {
727 tcg_gen_op3_i32(INDEX_op_divu_i32
, ret
, arg1
, arg2
);
728 } else if (TCG_TARGET_HAS_div2_i32
) {
729 TCGv_i32 t0
= tcg_temp_new_i32();
730 tcg_gen_movi_i32(t0
, 0);
731 tcg_gen_op5_i32(INDEX_op_divu2_i32
, ret
, t0
, arg1
, t0
, arg2
);
732 tcg_temp_free_i32(t0
);
735 /* Return value and both arguments are 32-bit and unsigned. */
736 sizemask
|= tcg_gen_sizemask(0, 0, 0);
737 sizemask
|= tcg_gen_sizemask(1, 0, 0);
738 sizemask
|= tcg_gen_sizemask(2, 0, 0);
739 tcg_gen_helper32(tcg_helper_divu_i32
, sizemask
, ret
, arg1
, arg2
);
743 static inline void tcg_gen_remu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
745 if (TCG_TARGET_HAS_div_i32
) {
746 tcg_gen_op3_i32(INDEX_op_remu_i32
, ret
, arg1
, arg2
);
747 } else if (TCG_TARGET_HAS_div2_i32
) {
748 TCGv_i32 t0
= tcg_temp_new_i32();
749 tcg_gen_movi_i32(t0
, 0);
750 tcg_gen_op5_i32(INDEX_op_divu2_i32
, t0
, ret
, arg1
, t0
, arg2
);
751 tcg_temp_free_i32(t0
);
754 /* Return value and both arguments are 32-bit and unsigned. */
755 sizemask
|= tcg_gen_sizemask(0, 0, 0);
756 sizemask
|= tcg_gen_sizemask(1, 0, 0);
757 sizemask
|= tcg_gen_sizemask(2, 0, 0);
758 tcg_gen_helper32(tcg_helper_remu_i32
, sizemask
, ret
, arg1
, arg2
);
762 #if TCG_TARGET_REG_BITS == 32
764 static inline void tcg_gen_mov_i64(TCGv_i64 ret
, TCGv_i64 arg
)
766 if (!TCGV_EQUAL_I64(ret
, arg
)) {
767 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
768 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
772 static inline void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
774 tcg_gen_movi_i32(TCGV_LOW(ret
), arg
);
775 tcg_gen_movi_i32(TCGV_HIGH(ret
), arg
>> 32);
778 static inline void tcg_gen_ld8u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
779 tcg_target_long offset
)
781 tcg_gen_ld8u_i32(TCGV_LOW(ret
), arg2
, offset
);
782 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
785 static inline void tcg_gen_ld8s_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
786 tcg_target_long offset
)
788 tcg_gen_ld8s_i32(TCGV_LOW(ret
), arg2
, offset
);
789 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(ret
), 31);
792 static inline void tcg_gen_ld16u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
793 tcg_target_long offset
)
795 tcg_gen_ld16u_i32(TCGV_LOW(ret
), arg2
, offset
);
796 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
799 static inline void tcg_gen_ld16s_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
800 tcg_target_long offset
)
802 tcg_gen_ld16s_i32(TCGV_LOW(ret
), arg2
, offset
);
803 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
806 static inline void tcg_gen_ld32u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
807 tcg_target_long offset
)
809 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
810 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
813 static inline void tcg_gen_ld32s_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
814 tcg_target_long offset
)
816 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
817 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
820 static inline void tcg_gen_ld_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
821 tcg_target_long offset
)
823 /* since arg2 and ret have different types, they cannot be the
825 #ifdef TCG_TARGET_WORDS_BIGENDIAN
826 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
);
827 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
+ 4);
829 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
830 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
+ 4);
834 static inline void tcg_gen_st8_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
835 tcg_target_long offset
)
837 tcg_gen_st8_i32(TCGV_LOW(arg1
), arg2
, offset
);
840 static inline void tcg_gen_st16_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
841 tcg_target_long offset
)
843 tcg_gen_st16_i32(TCGV_LOW(arg1
), arg2
, offset
);
846 static inline void tcg_gen_st32_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
847 tcg_target_long offset
)
849 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
852 static inline void tcg_gen_st_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
853 tcg_target_long offset
)
855 #ifdef TCG_TARGET_WORDS_BIGENDIAN
856 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
);
857 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
+ 4);
859 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
860 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
+ 4);
864 static inline void tcg_gen_add_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
866 tcg_gen_op6_i32(INDEX_op_add2_i32
, TCGV_LOW(ret
), TCGV_HIGH(ret
),
867 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), TCGV_LOW(arg2
),
871 static inline void tcg_gen_sub_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
873 tcg_gen_op6_i32(INDEX_op_sub2_i32
, TCGV_LOW(ret
), TCGV_HIGH(ret
),
874 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), TCGV_LOW(arg2
),
878 static inline void tcg_gen_and_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
880 tcg_gen_and_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
881 tcg_gen_and_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
884 static inline void tcg_gen_andi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
886 tcg_gen_andi_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
887 tcg_gen_andi_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
890 static inline void tcg_gen_or_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
892 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
893 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
896 static inline void tcg_gen_ori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
898 tcg_gen_ori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
899 tcg_gen_ori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
902 static inline void tcg_gen_xor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
904 tcg_gen_xor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
905 tcg_gen_xor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
908 static inline void tcg_gen_xori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
910 tcg_gen_xori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
911 tcg_gen_xori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
914 /* XXX: use generic code when basic block handling is OK or CPU
915 specific code (x86) */
916 static inline void tcg_gen_shl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
919 /* Return value and both arguments are 64-bit and signed. */
920 sizemask
|= tcg_gen_sizemask(0, 1, 1);
921 sizemask
|= tcg_gen_sizemask(1, 1, 1);
922 sizemask
|= tcg_gen_sizemask(2, 1, 1);
924 tcg_gen_helper64(tcg_helper_shl_i64
, sizemask
, ret
, arg1
, arg2
);
927 static inline void tcg_gen_shli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
929 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 0, 0);
932 static inline void tcg_gen_shr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
935 /* Return value and both arguments are 64-bit and signed. */
936 sizemask
|= tcg_gen_sizemask(0, 1, 1);
937 sizemask
|= tcg_gen_sizemask(1, 1, 1);
938 sizemask
|= tcg_gen_sizemask(2, 1, 1);
940 tcg_gen_helper64(tcg_helper_shr_i64
, sizemask
, ret
, arg1
, arg2
);
943 static inline void tcg_gen_shri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
945 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 0);
948 static inline void tcg_gen_sar_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
951 /* Return value and both arguments are 64-bit and signed. */
952 sizemask
|= tcg_gen_sizemask(0, 1, 1);
953 sizemask
|= tcg_gen_sizemask(1, 1, 1);
954 sizemask
|= tcg_gen_sizemask(2, 1, 1);
956 tcg_gen_helper64(tcg_helper_sar_i64
, sizemask
, ret
, arg1
, arg2
);
959 static inline void tcg_gen_sari_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
961 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 1);
964 static inline void tcg_gen_brcond_i64(TCGCond cond
, TCGv_i64 arg1
,
965 TCGv_i64 arg2
, int label_index
)
967 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
,
968 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), TCGV_LOW(arg2
),
969 TCGV_HIGH(arg2
), cond
, label_index
);
972 static inline void tcg_gen_setcond_i64(TCGCond cond
, TCGv_i64 ret
,
973 TCGv_i64 arg1
, TCGv_i64 arg2
)
975 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
976 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
977 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
978 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
981 static inline void tcg_gen_mul_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
986 t0
= tcg_temp_new_i64();
987 t1
= tcg_temp_new_i32();
989 tcg_gen_op4_i32(INDEX_op_mulu2_i32
, TCGV_LOW(t0
), TCGV_HIGH(t0
),
990 TCGV_LOW(arg1
), TCGV_LOW(arg2
));
992 tcg_gen_mul_i32(t1
, TCGV_LOW(arg1
), TCGV_HIGH(arg2
));
993 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
994 tcg_gen_mul_i32(t1
, TCGV_HIGH(arg1
), TCGV_LOW(arg2
));
995 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
997 tcg_gen_mov_i64(ret
, t0
);
998 tcg_temp_free_i64(t0
);
999 tcg_temp_free_i32(t1
);
1002 static inline void tcg_gen_div_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1005 /* Return value and both arguments are 64-bit and signed. */
1006 sizemask
|= tcg_gen_sizemask(0, 1, 1);
1007 sizemask
|= tcg_gen_sizemask(1, 1, 1);
1008 sizemask
|= tcg_gen_sizemask(2, 1, 1);
1010 tcg_gen_helper64(tcg_helper_div_i64
, sizemask
, ret
, arg1
, arg2
);
1013 static inline void tcg_gen_rem_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1016 /* Return value and both arguments are 64-bit and signed. */
1017 sizemask
|= tcg_gen_sizemask(0, 1, 1);
1018 sizemask
|= tcg_gen_sizemask(1, 1, 1);
1019 sizemask
|= tcg_gen_sizemask(2, 1, 1);
1021 tcg_gen_helper64(tcg_helper_rem_i64
, sizemask
, ret
, arg1
, arg2
);
1024 static inline void tcg_gen_divu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1027 /* Return value and both arguments are 64-bit and unsigned. */
1028 sizemask
|= tcg_gen_sizemask(0, 1, 0);
1029 sizemask
|= tcg_gen_sizemask(1, 1, 0);
1030 sizemask
|= tcg_gen_sizemask(2, 1, 0);
1032 tcg_gen_helper64(tcg_helper_divu_i64
, sizemask
, ret
, arg1
, arg2
);
1035 static inline void tcg_gen_remu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1038 /* Return value and both arguments are 64-bit and unsigned. */
1039 sizemask
|= tcg_gen_sizemask(0, 1, 0);
1040 sizemask
|= tcg_gen_sizemask(1, 1, 0);
1041 sizemask
|= tcg_gen_sizemask(2, 1, 0);
1043 tcg_gen_helper64(tcg_helper_remu_i64
, sizemask
, ret
, arg1
, arg2
);
1048 static inline void tcg_gen_mov_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1050 if (!TCGV_EQUAL_I64(ret
, arg
))
1051 tcg_gen_op2_i64(INDEX_op_mov_i64
, ret
, arg
);
1054 static inline void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
1056 tcg_gen_op2i_i64(INDEX_op_movi_i64
, ret
, arg
);
1059 static inline void tcg_gen_ld8u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1060 tcg_target_long offset
)
1062 tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64
, ret
, arg2
, offset
);
1065 static inline void tcg_gen_ld8s_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1066 tcg_target_long offset
)
1068 tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64
, ret
, arg2
, offset
);
1071 static inline void tcg_gen_ld16u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1072 tcg_target_long offset
)
1074 tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64
, ret
, arg2
, offset
);
1077 static inline void tcg_gen_ld16s_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1078 tcg_target_long offset
)
1080 tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64
, ret
, arg2
, offset
);
1083 static inline void tcg_gen_ld32u_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1084 tcg_target_long offset
)
1086 tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64
, ret
, arg2
, offset
);
1089 static inline void tcg_gen_ld32s_i64(TCGv_i64 ret
, TCGv_ptr arg2
,
1090 tcg_target_long offset
)
1092 tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64
, ret
, arg2
, offset
);
1095 static inline void tcg_gen_ld_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1097 tcg_gen_ldst_op_i64(INDEX_op_ld_i64
, ret
, arg2
, offset
);
1100 static inline void tcg_gen_st8_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
1101 tcg_target_long offset
)
1103 tcg_gen_ldst_op_i64(INDEX_op_st8_i64
, arg1
, arg2
, offset
);
1106 static inline void tcg_gen_st16_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
1107 tcg_target_long offset
)
1109 tcg_gen_ldst_op_i64(INDEX_op_st16_i64
, arg1
, arg2
, offset
);
1112 static inline void tcg_gen_st32_i64(TCGv_i64 arg1
, TCGv_ptr arg2
,
1113 tcg_target_long offset
)
1115 tcg_gen_ldst_op_i64(INDEX_op_st32_i64
, arg1
, arg2
, offset
);
1118 static inline void tcg_gen_st_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1120 tcg_gen_ldst_op_i64(INDEX_op_st_i64
, arg1
, arg2
, offset
);
1123 static inline void tcg_gen_add_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1125 tcg_gen_op3_i64(INDEX_op_add_i64
, ret
, arg1
, arg2
);
1128 static inline void tcg_gen_sub_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1130 tcg_gen_op3_i64(INDEX_op_sub_i64
, ret
, arg1
, arg2
);
1133 static inline void tcg_gen_and_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1135 if (TCGV_EQUAL_I64(arg1
, arg2
)) {
1136 tcg_gen_mov_i64(ret
, arg1
);
1138 tcg_gen_op3_i64(INDEX_op_and_i64
, ret
, arg1
, arg2
);
1142 static inline void tcg_gen_andi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, uint64_t arg2
)
1145 /* Some cases can be optimized here. */
1148 tcg_gen_movi_i64(ret
, 0);
1150 case 0xffffffffffffffffull
:
1151 tcg_gen_mov_i64(ret
, arg1
);
1154 /* Don't recurse with tcg_gen_ext8u_i32. */
1155 if (TCG_TARGET_HAS_ext8u_i64
) {
1156 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg1
);
1161 if (TCG_TARGET_HAS_ext16u_i64
) {
1162 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg1
);
1167 if (TCG_TARGET_HAS_ext32u_i64
) {
1168 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg1
);
1173 t0
= tcg_const_i64(arg2
);
1174 tcg_gen_and_i64(ret
, arg1
, t0
);
1175 tcg_temp_free_i64(t0
);
1178 static inline void tcg_gen_or_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1180 if (TCGV_EQUAL_I64(arg1
, arg2
)) {
1181 tcg_gen_mov_i64(ret
, arg1
);
1183 tcg_gen_op3_i64(INDEX_op_or_i64
, ret
, arg1
, arg2
);
1187 static inline void tcg_gen_ori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1189 /* Some cases can be optimized here. */
1191 tcg_gen_movi_i64(ret
, -1);
1192 } else if (arg2
== 0) {
1193 tcg_gen_mov_i64(ret
, arg1
);
1195 TCGv_i64 t0
= tcg_const_i64(arg2
);
1196 tcg_gen_or_i64(ret
, arg1
, t0
);
1197 tcg_temp_free_i64(t0
);
1201 static inline void tcg_gen_xor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1203 if (TCGV_EQUAL_I64(arg1
, arg2
)) {
1204 tcg_gen_movi_i64(ret
, 0);
1206 tcg_gen_op3_i64(INDEX_op_xor_i64
, ret
, arg1
, arg2
);
1210 static inline void tcg_gen_xori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1212 /* Some cases can be optimized here. */
1214 tcg_gen_mov_i64(ret
, arg1
);
1215 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i64
) {
1216 /* Don't recurse with tcg_gen_not_i64. */
1217 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg1
);
1219 TCGv_i64 t0
= tcg_const_i64(arg2
);
1220 tcg_gen_xor_i64(ret
, arg1
, t0
);
1221 tcg_temp_free_i64(t0
);
1225 static inline void tcg_gen_shl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1227 tcg_gen_op3_i64(INDEX_op_shl_i64
, ret
, arg1
, arg2
);
1230 static inline void tcg_gen_shli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1233 tcg_gen_mov_i64(ret
, arg1
);
1235 TCGv_i64 t0
= tcg_const_i64(arg2
);
1236 tcg_gen_shl_i64(ret
, arg1
, t0
);
1237 tcg_temp_free_i64(t0
);
1241 static inline void tcg_gen_shr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1243 tcg_gen_op3_i64(INDEX_op_shr_i64
, ret
, arg1
, arg2
);
1246 static inline void tcg_gen_shri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1249 tcg_gen_mov_i64(ret
, arg1
);
1251 TCGv_i64 t0
= tcg_const_i64(arg2
);
1252 tcg_gen_shr_i64(ret
, arg1
, t0
);
1253 tcg_temp_free_i64(t0
);
1257 static inline void tcg_gen_sar_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1259 tcg_gen_op3_i64(INDEX_op_sar_i64
, ret
, arg1
, arg2
);
1262 static inline void tcg_gen_sari_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1265 tcg_gen_mov_i64(ret
, arg1
);
1267 TCGv_i64 t0
= tcg_const_i64(arg2
);
1268 tcg_gen_sar_i64(ret
, arg1
, t0
);
1269 tcg_temp_free_i64(t0
);
1273 static inline void tcg_gen_brcond_i64(TCGCond cond
, TCGv_i64 arg1
,
1274 TCGv_i64 arg2
, int label_index
)
1276 tcg_gen_op4ii_i64(INDEX_op_brcond_i64
, arg1
, arg2
, cond
, label_index
);
1279 static inline void tcg_gen_setcond_i64(TCGCond cond
, TCGv_i64 ret
,
1280 TCGv_i64 arg1
, TCGv_i64 arg2
)
1282 tcg_gen_op4i_i64(INDEX_op_setcond_i64
, ret
, arg1
, arg2
, cond
);
1285 static inline void tcg_gen_mul_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1287 tcg_gen_op3_i64(INDEX_op_mul_i64
, ret
, arg1
, arg2
);
1290 static inline void tcg_gen_div_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1292 if (TCG_TARGET_HAS_div_i64
) {
1293 tcg_gen_op3_i64(INDEX_op_div_i64
, ret
, arg1
, arg2
);
1294 } else if (TCG_TARGET_HAS_div2_i64
) {
1295 TCGv_i64 t0
= tcg_temp_new_i64();
1296 tcg_gen_sari_i64(t0
, arg1
, 63);
1297 tcg_gen_op5_i64(INDEX_op_div2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1298 tcg_temp_free_i64(t0
);
1301 /* Return value and both arguments are 64-bit and signed. */
1302 sizemask
|= tcg_gen_sizemask(0, 1, 1);
1303 sizemask
|= tcg_gen_sizemask(1, 1, 1);
1304 sizemask
|= tcg_gen_sizemask(2, 1, 1);
1305 tcg_gen_helper64(tcg_helper_div_i64
, sizemask
, ret
, arg1
, arg2
);
1309 static inline void tcg_gen_rem_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1311 if (TCG_TARGET_HAS_div_i64
) {
1312 tcg_gen_op3_i64(INDEX_op_rem_i64
, ret
, arg1
, arg2
);
1313 } else if (TCG_TARGET_HAS_div2_i64
) {
1314 TCGv_i64 t0
= tcg_temp_new_i64();
1315 tcg_gen_sari_i64(t0
, arg1
, 63);
1316 tcg_gen_op5_i64(INDEX_op_div2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1317 tcg_temp_free_i64(t0
);
1320 /* Return value and both arguments are 64-bit and signed. */
1321 sizemask
|= tcg_gen_sizemask(0, 1, 1);
1322 sizemask
|= tcg_gen_sizemask(1, 1, 1);
1323 sizemask
|= tcg_gen_sizemask(2, 1, 1);
1324 tcg_gen_helper64(tcg_helper_rem_i64
, sizemask
, ret
, arg1
, arg2
);
1328 static inline void tcg_gen_divu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1330 if (TCG_TARGET_HAS_div_i64
) {
1331 tcg_gen_op3_i64(INDEX_op_divu_i64
, ret
, arg1
, arg2
);
1332 } else if (TCG_TARGET_HAS_div2_i64
) {
1333 TCGv_i64 t0
= tcg_temp_new_i64();
1334 tcg_gen_movi_i64(t0
, 0);
1335 tcg_gen_op5_i64(INDEX_op_divu2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1336 tcg_temp_free_i64(t0
);
1339 /* Return value and both arguments are 64-bit and unsigned. */
1340 sizemask
|= tcg_gen_sizemask(0, 1, 0);
1341 sizemask
|= tcg_gen_sizemask(1, 1, 0);
1342 sizemask
|= tcg_gen_sizemask(2, 1, 0);
1343 tcg_gen_helper64(tcg_helper_divu_i64
, sizemask
, ret
, arg1
, arg2
);
1347 static inline void tcg_gen_remu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1349 if (TCG_TARGET_HAS_div_i64
) {
1350 tcg_gen_op3_i64(INDEX_op_remu_i64
, ret
, arg1
, arg2
);
1351 } else if (TCG_TARGET_HAS_div2_i64
) {
1352 TCGv_i64 t0
= tcg_temp_new_i64();
1353 tcg_gen_movi_i64(t0
, 0);
1354 tcg_gen_op5_i64(INDEX_op_divu2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1355 tcg_temp_free_i64(t0
);
1358 /* Return value and both arguments are 64-bit and unsigned. */
1359 sizemask
|= tcg_gen_sizemask(0, 1, 0);
1360 sizemask
|= tcg_gen_sizemask(1, 1, 0);
1361 sizemask
|= tcg_gen_sizemask(2, 1, 0);
1362 tcg_gen_helper64(tcg_helper_remu_i64
, sizemask
, ret
, arg1
, arg2
);
1365 #endif /* TCG_TARGET_REG_BITS == 32 */
1367 static inline void tcg_gen_addi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1369 /* some cases can be optimized here */
1371 tcg_gen_mov_i64(ret
, arg1
);
1373 TCGv_i64 t0
= tcg_const_i64(arg2
);
1374 tcg_gen_add_i64(ret
, arg1
, t0
);
1375 tcg_temp_free_i64(t0
);
1379 static inline void tcg_gen_subfi_i64(TCGv_i64 ret
, int64_t arg1
, TCGv_i64 arg2
)
1381 TCGv_i64 t0
= tcg_const_i64(arg1
);
1382 tcg_gen_sub_i64(ret
, t0
, arg2
);
1383 tcg_temp_free_i64(t0
);
1386 static inline void tcg_gen_subi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1388 /* some cases can be optimized here */
1390 tcg_gen_mov_i64(ret
, arg1
);
1392 TCGv_i64 t0
= tcg_const_i64(arg2
);
1393 tcg_gen_sub_i64(ret
, arg1
, t0
);
1394 tcg_temp_free_i64(t0
);
1397 static inline void tcg_gen_brcondi_i64(TCGCond cond
, TCGv_i64 arg1
,
1398 int64_t arg2
, int label_index
)
1400 TCGv_i64 t0
= tcg_const_i64(arg2
);
1401 tcg_gen_brcond_i64(cond
, arg1
, t0
, label_index
);
1402 tcg_temp_free_i64(t0
);
1405 static inline void tcg_gen_setcondi_i64(TCGCond cond
, TCGv_i64 ret
,
1406 TCGv_i64 arg1
, int64_t arg2
)
1408 TCGv_i64 t0
= tcg_const_i64(arg2
);
1409 tcg_gen_setcond_i64(cond
, ret
, arg1
, t0
);
1410 tcg_temp_free_i64(t0
);
1413 static inline void tcg_gen_muli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1415 TCGv_i64 t0
= tcg_const_i64(arg2
);
1416 tcg_gen_mul_i64(ret
, arg1
, t0
);
1417 tcg_temp_free_i64(t0
);
1421 /***************************************/
1422 /* optional operations */
1424 static inline void tcg_gen_ext8s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1426 if (TCG_TARGET_HAS_ext8s_i32
) {
1427 tcg_gen_op2_i32(INDEX_op_ext8s_i32
, ret
, arg
);
1429 tcg_gen_shli_i32(ret
, arg
, 24);
1430 tcg_gen_sari_i32(ret
, ret
, 24);
1434 static inline void tcg_gen_ext16s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1436 if (TCG_TARGET_HAS_ext16s_i32
) {
1437 tcg_gen_op2_i32(INDEX_op_ext16s_i32
, ret
, arg
);
1439 tcg_gen_shli_i32(ret
, arg
, 16);
1440 tcg_gen_sari_i32(ret
, ret
, 16);
1444 static inline void tcg_gen_ext8u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1446 if (TCG_TARGET_HAS_ext8u_i32
) {
1447 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg
);
1449 tcg_gen_andi_i32(ret
, arg
, 0xffu
);
1453 static inline void tcg_gen_ext16u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1455 if (TCG_TARGET_HAS_ext16u_i32
) {
1456 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg
);
1458 tcg_gen_andi_i32(ret
, arg
, 0xffffu
);
1462 /* Note: we assume the two high bytes are set to zero */
1463 static inline void tcg_gen_bswap16_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1465 if (TCG_TARGET_HAS_bswap16_i32
) {
1466 tcg_gen_op2_i32(INDEX_op_bswap16_i32
, ret
, arg
);
1468 TCGv_i32 t0
= tcg_temp_new_i32();
1470 tcg_gen_ext8u_i32(t0
, arg
);
1471 tcg_gen_shli_i32(t0
, t0
, 8);
1472 tcg_gen_shri_i32(ret
, arg
, 8);
1473 tcg_gen_or_i32(ret
, ret
, t0
);
1474 tcg_temp_free_i32(t0
);
1478 static inline void tcg_gen_bswap32_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1480 if (TCG_TARGET_HAS_bswap32_i32
) {
1481 tcg_gen_op2_i32(INDEX_op_bswap32_i32
, ret
, arg
);
1484 t0
= tcg_temp_new_i32();
1485 t1
= tcg_temp_new_i32();
1487 tcg_gen_shli_i32(t0
, arg
, 24);
1489 tcg_gen_andi_i32(t1
, arg
, 0x0000ff00);
1490 tcg_gen_shli_i32(t1
, t1
, 8);
1491 tcg_gen_or_i32(t0
, t0
, t1
);
1493 tcg_gen_shri_i32(t1
, arg
, 8);
1494 tcg_gen_andi_i32(t1
, t1
, 0x0000ff00);
1495 tcg_gen_or_i32(t0
, t0
, t1
);
1497 tcg_gen_shri_i32(t1
, arg
, 24);
1498 tcg_gen_or_i32(ret
, t0
, t1
);
1499 tcg_temp_free_i32(t0
);
1500 tcg_temp_free_i32(t1
);
1504 #if TCG_TARGET_REG_BITS == 32
1505 static inline void tcg_gen_ext8s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1507 tcg_gen_ext8s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1508 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1511 static inline void tcg_gen_ext16s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1513 tcg_gen_ext16s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1514 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1517 static inline void tcg_gen_ext32s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1519 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1520 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1523 static inline void tcg_gen_ext8u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1525 tcg_gen_ext8u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1526 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1529 static inline void tcg_gen_ext16u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1531 tcg_gen_ext16u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1532 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1535 static inline void tcg_gen_ext32u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1537 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1538 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1541 static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
)
1543 tcg_gen_mov_i32(ret
, TCGV_LOW(arg
));
1546 static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
1548 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
1549 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1552 static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
1554 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
1555 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1558 /* Note: we assume the six high bytes are set to zero */
1559 static inline void tcg_gen_bswap16_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1561 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1562 tcg_gen_bswap16_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1565 /* Note: we assume the four high bytes are set to zero */
1566 static inline void tcg_gen_bswap32_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1568 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1569 tcg_gen_bswap32_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1572 static inline void tcg_gen_bswap64_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1575 t0
= tcg_temp_new_i32();
1576 t1
= tcg_temp_new_i32();
1578 tcg_gen_bswap32_i32(t0
, TCGV_LOW(arg
));
1579 tcg_gen_bswap32_i32(t1
, TCGV_HIGH(arg
));
1580 tcg_gen_mov_i32(TCGV_LOW(ret
), t1
);
1581 tcg_gen_mov_i32(TCGV_HIGH(ret
), t0
);
1582 tcg_temp_free_i32(t0
);
1583 tcg_temp_free_i32(t1
);
1587 static inline void tcg_gen_ext8s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1589 if (TCG_TARGET_HAS_ext8s_i64
) {
1590 tcg_gen_op2_i64(INDEX_op_ext8s_i64
, ret
, arg
);
1592 tcg_gen_shli_i64(ret
, arg
, 56);
1593 tcg_gen_sari_i64(ret
, ret
, 56);
1597 static inline void tcg_gen_ext16s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1599 if (TCG_TARGET_HAS_ext16s_i64
) {
1600 tcg_gen_op2_i64(INDEX_op_ext16s_i64
, ret
, arg
);
1602 tcg_gen_shli_i64(ret
, arg
, 48);
1603 tcg_gen_sari_i64(ret
, ret
, 48);
1607 static inline void tcg_gen_ext32s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1609 if (TCG_TARGET_HAS_ext32s_i64
) {
1610 tcg_gen_op2_i64(INDEX_op_ext32s_i64
, ret
, arg
);
1612 tcg_gen_shli_i64(ret
, arg
, 32);
1613 tcg_gen_sari_i64(ret
, ret
, 32);
1617 static inline void tcg_gen_ext8u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1619 if (TCG_TARGET_HAS_ext8u_i64
) {
1620 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg
);
1622 tcg_gen_andi_i64(ret
, arg
, 0xffu
);
1626 static inline void tcg_gen_ext16u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1628 if (TCG_TARGET_HAS_ext16u_i64
) {
1629 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg
);
1631 tcg_gen_andi_i64(ret
, arg
, 0xffffu
);
1635 static inline void tcg_gen_ext32u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1637 if (TCG_TARGET_HAS_ext32u_i64
) {
1638 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg
);
1640 tcg_gen_andi_i64(ret
, arg
, 0xffffffffu
);
1644 /* Note: we assume the target supports move between 32 and 64 bit
1645 registers. This will probably break MIPS64 targets. */
1646 static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
)
1648 tcg_gen_mov_i32(ret
, MAKE_TCGV_I32(GET_TCGV_I64(arg
)));
1651 /* Note: we assume the target supports move between 32 and 64 bit
1653 static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
1655 tcg_gen_ext32u_i64(ret
, MAKE_TCGV_I64(GET_TCGV_I32(arg
)));
1658 /* Note: we assume the target supports move between 32 and 64 bit
1660 static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
1662 tcg_gen_ext32s_i64(ret
, MAKE_TCGV_I64(GET_TCGV_I32(arg
)));
1665 /* Note: we assume the six high bytes are set to zero */
1666 static inline void tcg_gen_bswap16_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1668 if (TCG_TARGET_HAS_bswap16_i64
) {
1669 tcg_gen_op2_i64(INDEX_op_bswap16_i64
, ret
, arg
);
1671 TCGv_i64 t0
= tcg_temp_new_i64();
1673 tcg_gen_ext8u_i64(t0
, arg
);
1674 tcg_gen_shli_i64(t0
, t0
, 8);
1675 tcg_gen_shri_i64(ret
, arg
, 8);
1676 tcg_gen_or_i64(ret
, ret
, t0
);
1677 tcg_temp_free_i64(t0
);
1681 /* Note: we assume the four high bytes are set to zero */
1682 static inline void tcg_gen_bswap32_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1684 if (TCG_TARGET_HAS_bswap32_i64
) {
1685 tcg_gen_op2_i64(INDEX_op_bswap32_i64
, ret
, arg
);
1688 t0
= tcg_temp_new_i64();
1689 t1
= tcg_temp_new_i64();
1691 tcg_gen_shli_i64(t0
, arg
, 24);
1692 tcg_gen_ext32u_i64(t0
, t0
);
1694 tcg_gen_andi_i64(t1
, arg
, 0x0000ff00);
1695 tcg_gen_shli_i64(t1
, t1
, 8);
1696 tcg_gen_or_i64(t0
, t0
, t1
);
1698 tcg_gen_shri_i64(t1
, arg
, 8);
1699 tcg_gen_andi_i64(t1
, t1
, 0x0000ff00);
1700 tcg_gen_or_i64(t0
, t0
, t1
);
1702 tcg_gen_shri_i64(t1
, arg
, 24);
1703 tcg_gen_or_i64(ret
, t0
, t1
);
1704 tcg_temp_free_i64(t0
);
1705 tcg_temp_free_i64(t1
);
1709 static inline void tcg_gen_bswap64_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1711 if (TCG_TARGET_HAS_bswap64_i64
) {
1712 tcg_gen_op2_i64(INDEX_op_bswap64_i64
, ret
, arg
);
1714 TCGv_i64 t0
= tcg_temp_new_i64();
1715 TCGv_i64 t1
= tcg_temp_new_i64();
1717 tcg_gen_shli_i64(t0
, arg
, 56);
1719 tcg_gen_andi_i64(t1
, arg
, 0x0000ff00);
1720 tcg_gen_shli_i64(t1
, t1
, 40);
1721 tcg_gen_or_i64(t0
, t0
, t1
);
1723 tcg_gen_andi_i64(t1
, arg
, 0x00ff0000);
1724 tcg_gen_shli_i64(t1
, t1
, 24);
1725 tcg_gen_or_i64(t0
, t0
, t1
);
1727 tcg_gen_andi_i64(t1
, arg
, 0xff000000);
1728 tcg_gen_shli_i64(t1
, t1
, 8);
1729 tcg_gen_or_i64(t0
, t0
, t1
);
1731 tcg_gen_shri_i64(t1
, arg
, 8);
1732 tcg_gen_andi_i64(t1
, t1
, 0xff000000);
1733 tcg_gen_or_i64(t0
, t0
, t1
);
1735 tcg_gen_shri_i64(t1
, arg
, 24);
1736 tcg_gen_andi_i64(t1
, t1
, 0x00ff0000);
1737 tcg_gen_or_i64(t0
, t0
, t1
);
1739 tcg_gen_shri_i64(t1
, arg
, 40);
1740 tcg_gen_andi_i64(t1
, t1
, 0x0000ff00);
1741 tcg_gen_or_i64(t0
, t0
, t1
);
1743 tcg_gen_shri_i64(t1
, arg
, 56);
1744 tcg_gen_or_i64(ret
, t0
, t1
);
1745 tcg_temp_free_i64(t0
);
1746 tcg_temp_free_i64(t1
);
1752 static inline void tcg_gen_neg_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1754 if (TCG_TARGET_HAS_neg_i32
) {
1755 tcg_gen_op2_i32(INDEX_op_neg_i32
, ret
, arg
);
1757 TCGv_i32 t0
= tcg_const_i32(0);
1758 tcg_gen_sub_i32(ret
, t0
, arg
);
1759 tcg_temp_free_i32(t0
);
1763 static inline void tcg_gen_neg_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1765 if (TCG_TARGET_HAS_neg_i64
) {
1766 tcg_gen_op2_i64(INDEX_op_neg_i64
, ret
, arg
);
1768 TCGv_i64 t0
= tcg_const_i64(0);
1769 tcg_gen_sub_i64(ret
, t0
, arg
);
1770 tcg_temp_free_i64(t0
);
1774 static inline void tcg_gen_not_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1776 if (TCG_TARGET_HAS_not_i32
) {
1777 tcg_gen_op2_i32(INDEX_op_not_i32
, ret
, arg
);
1779 tcg_gen_xori_i32(ret
, arg
, -1);
1783 static inline void tcg_gen_not_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1785 #if TCG_TARGET_REG_BITS == 64
1786 if (TCG_TARGET_HAS_not_i64
) {
1787 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg
);
1789 tcg_gen_xori_i64(ret
, arg
, -1);
1792 tcg_gen_not_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1793 tcg_gen_not_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1797 static inline void tcg_gen_discard_i32(TCGv_i32 arg
)
1799 tcg_gen_op1_i32(INDEX_op_discard
, arg
);
1802 static inline void tcg_gen_discard_i64(TCGv_i64 arg
)
1804 #if TCG_TARGET_REG_BITS == 32
1805 tcg_gen_discard_i32(TCGV_LOW(arg
));
1806 tcg_gen_discard_i32(TCGV_HIGH(arg
));
1808 tcg_gen_op1_i64(INDEX_op_discard
, arg
);
1812 static inline void tcg_gen_andc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1814 if (TCG_TARGET_HAS_andc_i32
) {
1815 tcg_gen_op3_i32(INDEX_op_andc_i32
, ret
, arg1
, arg2
);
1817 TCGv_i32 t0
= tcg_temp_new_i32();
1818 tcg_gen_not_i32(t0
, arg2
);
1819 tcg_gen_and_i32(ret
, arg1
, t0
);
1820 tcg_temp_free_i32(t0
);
1824 static inline void tcg_gen_andc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1826 #if TCG_TARGET_REG_BITS == 64
1827 if (TCG_TARGET_HAS_andc_i64
) {
1828 tcg_gen_op3_i64(INDEX_op_andc_i64
, ret
, arg1
, arg2
);
1830 TCGv_i64 t0
= tcg_temp_new_i64();
1831 tcg_gen_not_i64(t0
, arg2
);
1832 tcg_gen_and_i64(ret
, arg1
, t0
);
1833 tcg_temp_free_i64(t0
);
1836 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1837 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1841 static inline void tcg_gen_eqv_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1843 if (TCG_TARGET_HAS_eqv_i32
) {
1844 tcg_gen_op3_i32(INDEX_op_eqv_i32
, ret
, arg1
, arg2
);
1846 tcg_gen_xor_i32(ret
, arg1
, arg2
);
1847 tcg_gen_not_i32(ret
, ret
);
1851 static inline void tcg_gen_eqv_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1853 #if TCG_TARGET_REG_BITS == 64
1854 if (TCG_TARGET_HAS_eqv_i64
) {
1855 tcg_gen_op3_i64(INDEX_op_eqv_i64
, ret
, arg1
, arg2
);
1857 tcg_gen_xor_i64(ret
, arg1
, arg2
);
1858 tcg_gen_not_i64(ret
, ret
);
1861 tcg_gen_eqv_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1862 tcg_gen_eqv_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1866 static inline void tcg_gen_nand_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1868 if (TCG_TARGET_HAS_nand_i32
) {
1869 tcg_gen_op3_i32(INDEX_op_nand_i32
, ret
, arg1
, arg2
);
1871 tcg_gen_and_i32(ret
, arg1
, arg2
);
1872 tcg_gen_not_i32(ret
, ret
);
1876 static inline void tcg_gen_nand_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1878 #if TCG_TARGET_REG_BITS == 64
1879 if (TCG_TARGET_HAS_nand_i64
) {
1880 tcg_gen_op3_i64(INDEX_op_nand_i64
, ret
, arg1
, arg2
);
1882 tcg_gen_and_i64(ret
, arg1
, arg2
);
1883 tcg_gen_not_i64(ret
, ret
);
1886 tcg_gen_nand_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1887 tcg_gen_nand_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1891 static inline void tcg_gen_nor_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1893 if (TCG_TARGET_HAS_nor_i32
) {
1894 tcg_gen_op3_i32(INDEX_op_nor_i32
, ret
, arg1
, arg2
);
1896 tcg_gen_or_i32(ret
, arg1
, arg2
);
1897 tcg_gen_not_i32(ret
, ret
);
1901 static inline void tcg_gen_nor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1903 #if TCG_TARGET_REG_BITS == 64
1904 if (TCG_TARGET_HAS_nor_i64
) {
1905 tcg_gen_op3_i64(INDEX_op_nor_i64
, ret
, arg1
, arg2
);
1907 tcg_gen_or_i64(ret
, arg1
, arg2
);
1908 tcg_gen_not_i64(ret
, ret
);
1911 tcg_gen_nor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1912 tcg_gen_nor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1916 static inline void tcg_gen_orc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1918 if (TCG_TARGET_HAS_orc_i32
) {
1919 tcg_gen_op3_i32(INDEX_op_orc_i32
, ret
, arg1
, arg2
);
1921 TCGv_i32 t0
= tcg_temp_new_i32();
1922 tcg_gen_not_i32(t0
, arg2
);
1923 tcg_gen_or_i32(ret
, arg1
, t0
);
1924 tcg_temp_free_i32(t0
);
1928 static inline void tcg_gen_orc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1930 #if TCG_TARGET_REG_BITS == 64
1931 if (TCG_TARGET_HAS_orc_i64
) {
1932 tcg_gen_op3_i64(INDEX_op_orc_i64
, ret
, arg1
, arg2
);
1934 TCGv_i64 t0
= tcg_temp_new_i64();
1935 tcg_gen_not_i64(t0
, arg2
);
1936 tcg_gen_or_i64(ret
, arg1
, t0
);
1937 tcg_temp_free_i64(t0
);
1940 tcg_gen_orc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1941 tcg_gen_orc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1945 static inline void tcg_gen_rotl_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1947 if (TCG_TARGET_HAS_rot_i32
) {
1948 tcg_gen_op3_i32(INDEX_op_rotl_i32
, ret
, arg1
, arg2
);
1952 t0
= tcg_temp_new_i32();
1953 t1
= tcg_temp_new_i32();
1954 tcg_gen_shl_i32(t0
, arg1
, arg2
);
1955 tcg_gen_subfi_i32(t1
, 32, arg2
);
1956 tcg_gen_shr_i32(t1
, arg1
, t1
);
1957 tcg_gen_or_i32(ret
, t0
, t1
);
1958 tcg_temp_free_i32(t0
);
1959 tcg_temp_free_i32(t1
);
1963 static inline void tcg_gen_rotl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1965 if (TCG_TARGET_HAS_rot_i64
) {
1966 tcg_gen_op3_i64(INDEX_op_rotl_i64
, ret
, arg1
, arg2
);
1969 t0
= tcg_temp_new_i64();
1970 t1
= tcg_temp_new_i64();
1971 tcg_gen_shl_i64(t0
, arg1
, arg2
);
1972 tcg_gen_subfi_i64(t1
, 64, arg2
);
1973 tcg_gen_shr_i64(t1
, arg1
, t1
);
1974 tcg_gen_or_i64(ret
, t0
, t1
);
1975 tcg_temp_free_i64(t0
);
1976 tcg_temp_free_i64(t1
);
1980 static inline void tcg_gen_rotli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
1982 /* some cases can be optimized here */
1984 tcg_gen_mov_i32(ret
, arg1
);
1985 } else if (TCG_TARGET_HAS_rot_i32
) {
1986 TCGv_i32 t0
= tcg_const_i32(arg2
);
1987 tcg_gen_rotl_i32(ret
, arg1
, t0
);
1988 tcg_temp_free_i32(t0
);
1991 t0
= tcg_temp_new_i32();
1992 t1
= tcg_temp_new_i32();
1993 tcg_gen_shli_i32(t0
, arg1
, arg2
);
1994 tcg_gen_shri_i32(t1
, arg1
, 32 - arg2
);
1995 tcg_gen_or_i32(ret
, t0
, t1
);
1996 tcg_temp_free_i32(t0
);
1997 tcg_temp_free_i32(t1
);
2001 static inline void tcg_gen_rotli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
2003 /* some cases can be optimized here */
2005 tcg_gen_mov_i64(ret
, arg1
);
2006 } else if (TCG_TARGET_HAS_rot_i64
) {
2007 TCGv_i64 t0
= tcg_const_i64(arg2
);
2008 tcg_gen_rotl_i64(ret
, arg1
, t0
);
2009 tcg_temp_free_i64(t0
);
2012 t0
= tcg_temp_new_i64();
2013 t1
= tcg_temp_new_i64();
2014 tcg_gen_shli_i64(t0
, arg1
, arg2
);
2015 tcg_gen_shri_i64(t1
, arg1
, 64 - arg2
);
2016 tcg_gen_or_i64(ret
, t0
, t1
);
2017 tcg_temp_free_i64(t0
);
2018 tcg_temp_free_i64(t1
);
2022 static inline void tcg_gen_rotr_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
2024 if (TCG_TARGET_HAS_rot_i32
) {
2025 tcg_gen_op3_i32(INDEX_op_rotr_i32
, ret
, arg1
, arg2
);
2029 t0
= tcg_temp_new_i32();
2030 t1
= tcg_temp_new_i32();
2031 tcg_gen_shr_i32(t0
, arg1
, arg2
);
2032 tcg_gen_subfi_i32(t1
, 32, arg2
);
2033 tcg_gen_shl_i32(t1
, arg1
, t1
);
2034 tcg_gen_or_i32(ret
, t0
, t1
);
2035 tcg_temp_free_i32(t0
);
2036 tcg_temp_free_i32(t1
);
2040 static inline void tcg_gen_rotr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2042 if (TCG_TARGET_HAS_rot_i64
) {
2043 tcg_gen_op3_i64(INDEX_op_rotr_i64
, ret
, arg1
, arg2
);
2046 t0
= tcg_temp_new_i64();
2047 t1
= tcg_temp_new_i64();
2048 tcg_gen_shr_i64(t0
, arg1
, arg2
);
2049 tcg_gen_subfi_i64(t1
, 64, arg2
);
2050 tcg_gen_shl_i64(t1
, arg1
, t1
);
2051 tcg_gen_or_i64(ret
, t0
, t1
);
2052 tcg_temp_free_i64(t0
);
2053 tcg_temp_free_i64(t1
);
2057 static inline void tcg_gen_rotri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
2059 /* some cases can be optimized here */
2061 tcg_gen_mov_i32(ret
, arg1
);
2063 tcg_gen_rotli_i32(ret
, arg1
, 32 - arg2
);
2067 static inline void tcg_gen_rotri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
2069 /* some cases can be optimized here */
2071 tcg_gen_mov_i64(ret
, arg1
);
2073 tcg_gen_rotli_i64(ret
, arg1
, 64 - arg2
);
2077 static inline void tcg_gen_deposit_i32(TCGv_i32 ret
, TCGv_i32 arg1
,
2078 TCGv_i32 arg2
, unsigned int ofs
,
2084 tcg_debug_assert(ofs
< 32);
2085 tcg_debug_assert(len
<= 32);
2086 tcg_debug_assert(ofs
+ len
<= 32);
2088 if (ofs
== 0 && len
== 32) {
2089 tcg_gen_mov_i32(ret
, arg2
);
2092 if (TCG_TARGET_HAS_deposit_i32
&& TCG_TARGET_deposit_i32_valid(ofs
, len
)) {
2093 tcg_gen_op5ii_i32(INDEX_op_deposit_i32
, ret
, arg1
, arg2
, ofs
, len
);
2097 mask
= (1u << len
) - 1;
2098 t1
= tcg_temp_new_i32();
2100 if (ofs
+ len
< 32) {
2101 tcg_gen_andi_i32(t1
, arg2
, mask
);
2102 tcg_gen_shli_i32(t1
, t1
, ofs
);
2104 tcg_gen_shli_i32(t1
, arg2
, ofs
);
2106 tcg_gen_andi_i32(ret
, arg1
, ~(mask
<< ofs
));
2107 tcg_gen_or_i32(ret
, ret
, t1
);
2109 tcg_temp_free_i32(t1
);
2112 static inline void tcg_gen_deposit_i64(TCGv_i64 ret
, TCGv_i64 arg1
,
2113 TCGv_i64 arg2
, unsigned int ofs
,
2119 tcg_debug_assert(ofs
< 64);
2120 tcg_debug_assert(len
<= 64);
2121 tcg_debug_assert(ofs
+ len
<= 64);
2123 if (ofs
== 0 && len
== 64) {
2124 tcg_gen_mov_i64(ret
, arg2
);
2127 if (TCG_TARGET_HAS_deposit_i64
&& TCG_TARGET_deposit_i64_valid(ofs
, len
)) {
2128 tcg_gen_op5ii_i64(INDEX_op_deposit_i64
, ret
, arg1
, arg2
, ofs
, len
);
2132 #if TCG_TARGET_REG_BITS == 32
2134 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
2135 tcg_gen_deposit_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
),
2136 TCGV_LOW(arg2
), ofs
- 32, len
);
2139 if (ofs
+ len
<= 32) {
2140 tcg_gen_deposit_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
),
2141 TCGV_LOW(arg2
), ofs
, len
);
2142 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
2147 mask
= (1ull << len
) - 1;
2148 t1
= tcg_temp_new_i64();
2150 if (ofs
+ len
< 64) {
2151 tcg_gen_andi_i64(t1
, arg2
, mask
);
2152 tcg_gen_shli_i64(t1
, t1
, ofs
);
2154 tcg_gen_shli_i64(t1
, arg2
, ofs
);
2156 tcg_gen_andi_i64(ret
, arg1
, ~(mask
<< ofs
));
2157 tcg_gen_or_i64(ret
, ret
, t1
);
2159 tcg_temp_free_i64(t1
);
2162 static inline void tcg_gen_concat_i32_i64(TCGv_i64 dest
, TCGv_i32 low
,
2165 #if TCG_TARGET_REG_BITS == 32
2166 tcg_gen_mov_i32(TCGV_LOW(dest
), low
);
2167 tcg_gen_mov_i32(TCGV_HIGH(dest
), high
);
2169 TCGv_i64 tmp
= tcg_temp_new_i64();
2170 /* These extensions are only needed for type correctness.
2171 We may be able to do better given target specific information. */
2172 tcg_gen_extu_i32_i64(tmp
, high
);
2173 tcg_gen_extu_i32_i64(dest
, low
);
2174 /* If deposit is available, use it. Otherwise use the extra
2175 knowledge that we have of the zero-extensions above. */
2176 if (TCG_TARGET_HAS_deposit_i64
&& TCG_TARGET_deposit_i64_valid(32, 32)) {
2177 tcg_gen_deposit_i64(dest
, dest
, tmp
, 32, 32);
2179 tcg_gen_shli_i64(tmp
, tmp
, 32);
2180 tcg_gen_or_i64(dest
, dest
, tmp
);
2182 tcg_temp_free_i64(tmp
);
2186 static inline void tcg_gen_concat32_i64(TCGv_i64 dest
, TCGv_i64 low
,
2189 tcg_gen_deposit_i64(dest
, low
, high
, 32, 32);
2192 static inline void tcg_gen_movcond_i32(TCGCond cond
, TCGv_i32 ret
,
2193 TCGv_i32 c1
, TCGv_i32 c2
,
2194 TCGv_i32 v1
, TCGv_i32 v2
)
2196 if (TCG_TARGET_HAS_movcond_i32
) {
2197 tcg_gen_op6i_i32(INDEX_op_movcond_i32
, ret
, c1
, c2
, v1
, v2
, cond
);
2199 TCGv_i32 t0
= tcg_temp_new_i32();
2200 TCGv_i32 t1
= tcg_temp_new_i32();
2201 tcg_gen_setcond_i32(cond
, t0
, c1
, c2
);
2202 tcg_gen_neg_i32(t0
, t0
);
2203 tcg_gen_and_i32(t1
, v1
, t0
);
2204 tcg_gen_andc_i32(ret
, v2
, t0
);
2205 tcg_gen_or_i32(ret
, ret
, t1
);
2206 tcg_temp_free_i32(t0
);
2207 tcg_temp_free_i32(t1
);
2211 static inline void tcg_gen_movcond_i64(TCGCond cond
, TCGv_i64 ret
,
2212 TCGv_i64 c1
, TCGv_i64 c2
,
2213 TCGv_i64 v1
, TCGv_i64 v2
)
2215 #if TCG_TARGET_REG_BITS == 32
2216 TCGv_i32 t0
= tcg_temp_new_i32();
2217 TCGv_i32 t1
= tcg_temp_new_i32();
2218 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, t0
,
2219 TCGV_LOW(c1
), TCGV_HIGH(c1
),
2220 TCGV_LOW(c2
), TCGV_HIGH(c2
), cond
);
2222 if (TCG_TARGET_HAS_movcond_i32
) {
2223 tcg_gen_movi_i32(t1
, 0);
2224 tcg_gen_movcond_i32(TCG_COND_NE
, TCGV_LOW(ret
), t0
, t1
,
2225 TCGV_LOW(v1
), TCGV_LOW(v2
));
2226 tcg_gen_movcond_i32(TCG_COND_NE
, TCGV_HIGH(ret
), t0
, t1
,
2227 TCGV_HIGH(v1
), TCGV_HIGH(v2
));
2229 tcg_gen_neg_i32(t0
, t0
);
2231 tcg_gen_and_i32(t1
, TCGV_LOW(v1
), t0
);
2232 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(v2
), t0
);
2233 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), t1
);
2235 tcg_gen_and_i32(t1
, TCGV_HIGH(v1
), t0
);
2236 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(v2
), t0
);
2237 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(ret
), t1
);
2239 tcg_temp_free_i32(t0
);
2240 tcg_temp_free_i32(t1
);
2242 if (TCG_TARGET_HAS_movcond_i64
) {
2243 tcg_gen_op6i_i64(INDEX_op_movcond_i64
, ret
, c1
, c2
, v1
, v2
, cond
);
2245 TCGv_i64 t0
= tcg_temp_new_i64();
2246 TCGv_i64 t1
= tcg_temp_new_i64();
2247 tcg_gen_setcond_i64(cond
, t0
, c1
, c2
);
2248 tcg_gen_neg_i64(t0
, t0
);
2249 tcg_gen_and_i64(t1
, v1
, t0
);
2250 tcg_gen_andc_i64(ret
, v2
, t0
);
2251 tcg_gen_or_i64(ret
, ret
, t1
);
2252 tcg_temp_free_i64(t0
);
2253 tcg_temp_free_i64(t1
);
2258 /***************************************/
2259 /* QEMU specific operations. Their type depend on the QEMU CPU
2261 #ifndef TARGET_LONG_BITS
2262 #error must include QEMU headers
2265 #if TARGET_LONG_BITS == 32
2266 #define TCGv TCGv_i32
2267 #define tcg_temp_new() tcg_temp_new_i32()
2268 #define tcg_global_reg_new tcg_global_reg_new_i32
2269 #define tcg_global_mem_new tcg_global_mem_new_i32
2270 #define tcg_temp_local_new() tcg_temp_local_new_i32()
2271 #define tcg_temp_free tcg_temp_free_i32
2272 #define tcg_gen_qemu_ldst_op tcg_gen_op3i_i32
2273 #define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i32
2274 #define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
2275 #define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b)
2277 #define TCGv TCGv_i64
2278 #define tcg_temp_new() tcg_temp_new_i64()
2279 #define tcg_global_reg_new tcg_global_reg_new_i64
2280 #define tcg_global_mem_new tcg_global_mem_new_i64
2281 #define tcg_temp_local_new() tcg_temp_local_new_i64()
2282 #define tcg_temp_free tcg_temp_free_i64
2283 #define tcg_gen_qemu_ldst_op tcg_gen_op3i_i64
2284 #define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i64
2285 #define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
2286 #define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b)
2289 /* debug info: write the PC of the corresponding QEMU CPU instruction */
2290 static inline void tcg_gen_debug_insn_start(uint64_t pc
)
2292 /* XXX: must really use a 32 bit size for TCGArg in all cases */
2293 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
2294 tcg_gen_op2ii(INDEX_op_debug_insn_start
,
2295 (uint32_t)(pc
), (uint32_t)(pc
>> 32));
2297 tcg_gen_op1i(INDEX_op_debug_insn_start
, pc
);
2301 static inline void tcg_gen_exit_tb(tcg_target_long val
)
2303 tcg_gen_op1i(INDEX_op_exit_tb
, val
);
2306 static inline void tcg_gen_goto_tb(unsigned idx
)
2308 /* We only support two chained exits. */
2309 tcg_debug_assert(idx
<= 1);
2310 #ifdef CONFIG_DEBUG_TCG
2311 /* Verify that we havn't seen this numbered exit before. */
2312 tcg_debug_assert((tcg_ctx
.goto_tb_issue_mask
& (1 << idx
)) == 0);
2313 tcg_ctx
.goto_tb_issue_mask
|= 1 << idx
;
2315 tcg_gen_op1i(INDEX_op_goto_tb
, idx
);
2318 #if TCG_TARGET_REG_BITS == 32
2319 static inline void tcg_gen_qemu_ld8u(TCGv ret
, TCGv addr
, int mem_index
)
2321 #if TARGET_LONG_BITS == 32
2322 tcg_gen_op3i_i32(INDEX_op_qemu_ld8u
, ret
, addr
, mem_index
);
2324 tcg_gen_op4i_i32(INDEX_op_qemu_ld8u
, TCGV_LOW(ret
), TCGV_LOW(addr
),
2325 TCGV_HIGH(addr
), mem_index
);
2326 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2330 static inline void tcg_gen_qemu_ld8s(TCGv ret
, TCGv addr
, int mem_index
)
2332 #if TARGET_LONG_BITS == 32
2333 tcg_gen_op3i_i32(INDEX_op_qemu_ld8s
, ret
, addr
, mem_index
);
2335 tcg_gen_op4i_i32(INDEX_op_qemu_ld8s
, TCGV_LOW(ret
), TCGV_LOW(addr
),
2336 TCGV_HIGH(addr
), mem_index
);
2337 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2341 static inline void tcg_gen_qemu_ld16u(TCGv ret
, TCGv addr
, int mem_index
)
2343 #if TARGET_LONG_BITS == 32
2344 tcg_gen_op3i_i32(INDEX_op_qemu_ld16u
, ret
, addr
, mem_index
);
2346 tcg_gen_op4i_i32(INDEX_op_qemu_ld16u
, TCGV_LOW(ret
), TCGV_LOW(addr
),
2347 TCGV_HIGH(addr
), mem_index
);
2348 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2352 static inline void tcg_gen_qemu_ld16s(TCGv ret
, TCGv addr
, int mem_index
)
2354 #if TARGET_LONG_BITS == 32
2355 tcg_gen_op3i_i32(INDEX_op_qemu_ld16s
, ret
, addr
, mem_index
);
2357 tcg_gen_op4i_i32(INDEX_op_qemu_ld16s
, TCGV_LOW(ret
), TCGV_LOW(addr
),
2358 TCGV_HIGH(addr
), mem_index
);
2359 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2363 static inline void tcg_gen_qemu_ld32u(TCGv ret
, TCGv addr
, int mem_index
)
2365 #if TARGET_LONG_BITS == 32
2366 tcg_gen_op3i_i32(INDEX_op_qemu_ld32
, ret
, addr
, mem_index
);
2368 tcg_gen_op4i_i32(INDEX_op_qemu_ld32
, TCGV_LOW(ret
), TCGV_LOW(addr
),
2369 TCGV_HIGH(addr
), mem_index
);
2370 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2374 static inline void tcg_gen_qemu_ld32s(TCGv ret
, TCGv addr
, int mem_index
)
2376 #if TARGET_LONG_BITS == 32
2377 tcg_gen_op3i_i32(INDEX_op_qemu_ld32
, ret
, addr
, mem_index
);
2379 tcg_gen_op4i_i32(INDEX_op_qemu_ld32
, TCGV_LOW(ret
), TCGV_LOW(addr
),
2380 TCGV_HIGH(addr
), mem_index
);
2381 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2385 static inline void tcg_gen_qemu_ld64(TCGv_i64 ret
, TCGv addr
, int mem_index
)
2387 #if TARGET_LONG_BITS == 32
2388 tcg_gen_op4i_i32(INDEX_op_qemu_ld64
, TCGV_LOW(ret
), TCGV_HIGH(ret
), addr
, mem_index
);
2390 tcg_gen_op5i_i32(INDEX_op_qemu_ld64
, TCGV_LOW(ret
), TCGV_HIGH(ret
),
2391 TCGV_LOW(addr
), TCGV_HIGH(addr
), mem_index
);
2395 static inline void tcg_gen_qemu_st8(TCGv arg
, TCGv addr
, int mem_index
)
2397 #if TARGET_LONG_BITS == 32
2398 tcg_gen_op3i_i32(INDEX_op_qemu_st8
, arg
, addr
, mem_index
);
2400 tcg_gen_op4i_i32(INDEX_op_qemu_st8
, TCGV_LOW(arg
), TCGV_LOW(addr
),
2401 TCGV_HIGH(addr
), mem_index
);
2405 static inline void tcg_gen_qemu_st16(TCGv arg
, TCGv addr
, int mem_index
)
2407 #if TARGET_LONG_BITS == 32
2408 tcg_gen_op3i_i32(INDEX_op_qemu_st16
, arg
, addr
, mem_index
);
2410 tcg_gen_op4i_i32(INDEX_op_qemu_st16
, TCGV_LOW(arg
), TCGV_LOW(addr
),
2411 TCGV_HIGH(addr
), mem_index
);
2415 static inline void tcg_gen_qemu_st32(TCGv arg
, TCGv addr
, int mem_index
)
2417 #if TARGET_LONG_BITS == 32
2418 tcg_gen_op3i_i32(INDEX_op_qemu_st32
, arg
, addr
, mem_index
);
2420 tcg_gen_op4i_i32(INDEX_op_qemu_st32
, TCGV_LOW(arg
), TCGV_LOW(addr
),
2421 TCGV_HIGH(addr
), mem_index
);
2425 static inline void tcg_gen_qemu_st64(TCGv_i64 arg
, TCGv addr
, int mem_index
)
2427 #if TARGET_LONG_BITS == 32
2428 tcg_gen_op4i_i32(INDEX_op_qemu_st64
, TCGV_LOW(arg
), TCGV_HIGH(arg
), addr
,
2431 tcg_gen_op5i_i32(INDEX_op_qemu_st64
, TCGV_LOW(arg
), TCGV_HIGH(arg
),
2432 TCGV_LOW(addr
), TCGV_HIGH(addr
), mem_index
);
2436 #define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i32(TCGV_PTR_TO_NAT(R), (A), (O))
2437 #define tcg_gen_discard_ptr(A) tcg_gen_discard_i32(TCGV_PTR_TO_NAT(A))
2439 #else /* TCG_TARGET_REG_BITS == 32 */
2441 static inline void tcg_gen_qemu_ld8u(TCGv ret
, TCGv addr
, int mem_index
)
2443 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8u
, ret
, addr
, mem_index
);
2446 static inline void tcg_gen_qemu_ld8s(TCGv ret
, TCGv addr
, int mem_index
)
2448 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8s
, ret
, addr
, mem_index
);
2451 static inline void tcg_gen_qemu_ld16u(TCGv ret
, TCGv addr
, int mem_index
)
2453 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16u
, ret
, addr
, mem_index
);
2456 static inline void tcg_gen_qemu_ld16s(TCGv ret
, TCGv addr
, int mem_index
)
2458 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16s
, ret
, addr
, mem_index
);
2461 static inline void tcg_gen_qemu_ld32u(TCGv ret
, TCGv addr
, int mem_index
)
2463 #if TARGET_LONG_BITS == 32
2464 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32
, ret
, addr
, mem_index
);
2466 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32u
, ret
, addr
, mem_index
);
2470 static inline void tcg_gen_qemu_ld32s(TCGv ret
, TCGv addr
, int mem_index
)
2472 #if TARGET_LONG_BITS == 32
2473 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32
, ret
, addr
, mem_index
);
2475 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32s
, ret
, addr
, mem_index
);
2479 static inline void tcg_gen_qemu_ld64(TCGv_i64 ret
, TCGv addr
, int mem_index
)
2481 tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_ld64
, ret
, addr
, mem_index
);
2484 static inline void tcg_gen_qemu_st8(TCGv arg
, TCGv addr
, int mem_index
)
2486 tcg_gen_qemu_ldst_op(INDEX_op_qemu_st8
, arg
, addr
, mem_index
);
2489 static inline void tcg_gen_qemu_st16(TCGv arg
, TCGv addr
, int mem_index
)
2491 tcg_gen_qemu_ldst_op(INDEX_op_qemu_st16
, arg
, addr
, mem_index
);
2494 static inline void tcg_gen_qemu_st32(TCGv arg
, TCGv addr
, int mem_index
)
2496 tcg_gen_qemu_ldst_op(INDEX_op_qemu_st32
, arg
, addr
, mem_index
);
2499 static inline void tcg_gen_qemu_st64(TCGv_i64 arg
, TCGv addr
, int mem_index
)
2501 tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_st64
, arg
, addr
, mem_index
);
2504 #define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i64(TCGV_PTR_TO_NAT(R), (A), (O))
2505 #define tcg_gen_discard_ptr(A) tcg_gen_discard_i64(TCGV_PTR_TO_NAT(A))
2507 #endif /* TCG_TARGET_REG_BITS != 32 */
2509 #if TARGET_LONG_BITS == 64
2510 #define tcg_gen_movi_tl tcg_gen_movi_i64
2511 #define tcg_gen_mov_tl tcg_gen_mov_i64
2512 #define tcg_gen_ld8u_tl tcg_gen_ld8u_i64
2513 #define tcg_gen_ld8s_tl tcg_gen_ld8s_i64
2514 #define tcg_gen_ld16u_tl tcg_gen_ld16u_i64
2515 #define tcg_gen_ld16s_tl tcg_gen_ld16s_i64
2516 #define tcg_gen_ld32u_tl tcg_gen_ld32u_i64
2517 #define tcg_gen_ld32s_tl tcg_gen_ld32s_i64
2518 #define tcg_gen_ld_tl tcg_gen_ld_i64
2519 #define tcg_gen_st8_tl tcg_gen_st8_i64
2520 #define tcg_gen_st16_tl tcg_gen_st16_i64
2521 #define tcg_gen_st32_tl tcg_gen_st32_i64
2522 #define tcg_gen_st_tl tcg_gen_st_i64
2523 #define tcg_gen_add_tl tcg_gen_add_i64
2524 #define tcg_gen_addi_tl tcg_gen_addi_i64
2525 #define tcg_gen_sub_tl tcg_gen_sub_i64
2526 #define tcg_gen_neg_tl tcg_gen_neg_i64
2527 #define tcg_gen_subfi_tl tcg_gen_subfi_i64
2528 #define tcg_gen_subi_tl tcg_gen_subi_i64
2529 #define tcg_gen_and_tl tcg_gen_and_i64
2530 #define tcg_gen_andi_tl tcg_gen_andi_i64
2531 #define tcg_gen_or_tl tcg_gen_or_i64
2532 #define tcg_gen_ori_tl tcg_gen_ori_i64
2533 #define tcg_gen_xor_tl tcg_gen_xor_i64
2534 #define tcg_gen_xori_tl tcg_gen_xori_i64
2535 #define tcg_gen_not_tl tcg_gen_not_i64
2536 #define tcg_gen_shl_tl tcg_gen_shl_i64
2537 #define tcg_gen_shli_tl tcg_gen_shli_i64
2538 #define tcg_gen_shr_tl tcg_gen_shr_i64
2539 #define tcg_gen_shri_tl tcg_gen_shri_i64
2540 #define tcg_gen_sar_tl tcg_gen_sar_i64
2541 #define tcg_gen_sari_tl tcg_gen_sari_i64
2542 #define tcg_gen_brcond_tl tcg_gen_brcond_i64
2543 #define tcg_gen_brcondi_tl tcg_gen_brcondi_i64
2544 #define tcg_gen_setcond_tl tcg_gen_setcond_i64
2545 #define tcg_gen_setcondi_tl tcg_gen_setcondi_i64
2546 #define tcg_gen_mul_tl tcg_gen_mul_i64
2547 #define tcg_gen_muli_tl tcg_gen_muli_i64
2548 #define tcg_gen_div_tl tcg_gen_div_i64
2549 #define tcg_gen_rem_tl tcg_gen_rem_i64
2550 #define tcg_gen_divu_tl tcg_gen_divu_i64
2551 #define tcg_gen_remu_tl tcg_gen_remu_i64
2552 #define tcg_gen_discard_tl tcg_gen_discard_i64
2553 #define tcg_gen_trunc_tl_i32 tcg_gen_trunc_i64_i32
2554 #define tcg_gen_trunc_i64_tl tcg_gen_mov_i64
2555 #define tcg_gen_extu_i32_tl tcg_gen_extu_i32_i64
2556 #define tcg_gen_ext_i32_tl tcg_gen_ext_i32_i64
2557 #define tcg_gen_extu_tl_i64 tcg_gen_mov_i64
2558 #define tcg_gen_ext_tl_i64 tcg_gen_mov_i64
2559 #define tcg_gen_ext8u_tl tcg_gen_ext8u_i64
2560 #define tcg_gen_ext8s_tl tcg_gen_ext8s_i64
2561 #define tcg_gen_ext16u_tl tcg_gen_ext16u_i64
2562 #define tcg_gen_ext16s_tl tcg_gen_ext16s_i64
2563 #define tcg_gen_ext32u_tl tcg_gen_ext32u_i64
2564 #define tcg_gen_ext32s_tl tcg_gen_ext32s_i64
2565 #define tcg_gen_bswap16_tl tcg_gen_bswap16_i64
2566 #define tcg_gen_bswap32_tl tcg_gen_bswap32_i64
2567 #define tcg_gen_bswap64_tl tcg_gen_bswap64_i64
2568 #define tcg_gen_concat_tl_i64 tcg_gen_concat32_i64
2569 #define tcg_gen_andc_tl tcg_gen_andc_i64
2570 #define tcg_gen_eqv_tl tcg_gen_eqv_i64
2571 #define tcg_gen_nand_tl tcg_gen_nand_i64
2572 #define tcg_gen_nor_tl tcg_gen_nor_i64
2573 #define tcg_gen_orc_tl tcg_gen_orc_i64
2574 #define tcg_gen_rotl_tl tcg_gen_rotl_i64
2575 #define tcg_gen_rotli_tl tcg_gen_rotli_i64
2576 #define tcg_gen_rotr_tl tcg_gen_rotr_i64
2577 #define tcg_gen_rotri_tl tcg_gen_rotri_i64
2578 #define tcg_gen_deposit_tl tcg_gen_deposit_i64
2579 #define tcg_const_tl tcg_const_i64
2580 #define tcg_const_local_tl tcg_const_local_i64
2581 #define tcg_gen_movcond_tl tcg_gen_movcond_i64
2583 #define tcg_gen_movi_tl tcg_gen_movi_i32
2584 #define tcg_gen_mov_tl tcg_gen_mov_i32
2585 #define tcg_gen_ld8u_tl tcg_gen_ld8u_i32
2586 #define tcg_gen_ld8s_tl tcg_gen_ld8s_i32
2587 #define tcg_gen_ld16u_tl tcg_gen_ld16u_i32
2588 #define tcg_gen_ld16s_tl tcg_gen_ld16s_i32
2589 #define tcg_gen_ld32u_tl tcg_gen_ld_i32
2590 #define tcg_gen_ld32s_tl tcg_gen_ld_i32
2591 #define tcg_gen_ld_tl tcg_gen_ld_i32
2592 #define tcg_gen_st8_tl tcg_gen_st8_i32
2593 #define tcg_gen_st16_tl tcg_gen_st16_i32
2594 #define tcg_gen_st32_tl tcg_gen_st_i32
2595 #define tcg_gen_st_tl tcg_gen_st_i32
2596 #define tcg_gen_add_tl tcg_gen_add_i32
2597 #define tcg_gen_addi_tl tcg_gen_addi_i32
2598 #define tcg_gen_sub_tl tcg_gen_sub_i32
2599 #define tcg_gen_neg_tl tcg_gen_neg_i32
2600 #define tcg_gen_subfi_tl tcg_gen_subfi_i32
2601 #define tcg_gen_subi_tl tcg_gen_subi_i32
2602 #define tcg_gen_and_tl tcg_gen_and_i32
2603 #define tcg_gen_andi_tl tcg_gen_andi_i32
2604 #define tcg_gen_or_tl tcg_gen_or_i32
2605 #define tcg_gen_ori_tl tcg_gen_ori_i32
2606 #define tcg_gen_xor_tl tcg_gen_xor_i32
2607 #define tcg_gen_xori_tl tcg_gen_xori_i32
2608 #define tcg_gen_not_tl tcg_gen_not_i32
2609 #define tcg_gen_shl_tl tcg_gen_shl_i32
2610 #define tcg_gen_shli_tl tcg_gen_shli_i32
2611 #define tcg_gen_shr_tl tcg_gen_shr_i32
2612 #define tcg_gen_shri_tl tcg_gen_shri_i32
2613 #define tcg_gen_sar_tl tcg_gen_sar_i32
2614 #define tcg_gen_sari_tl tcg_gen_sari_i32
2615 #define tcg_gen_brcond_tl tcg_gen_brcond_i32
2616 #define tcg_gen_brcondi_tl tcg_gen_brcondi_i32
2617 #define tcg_gen_setcond_tl tcg_gen_setcond_i32
2618 #define tcg_gen_setcondi_tl tcg_gen_setcondi_i32
2619 #define tcg_gen_mul_tl tcg_gen_mul_i32
2620 #define tcg_gen_muli_tl tcg_gen_muli_i32
2621 #define tcg_gen_div_tl tcg_gen_div_i32
2622 #define tcg_gen_rem_tl tcg_gen_rem_i32
2623 #define tcg_gen_divu_tl tcg_gen_divu_i32
2624 #define tcg_gen_remu_tl tcg_gen_remu_i32
2625 #define tcg_gen_discard_tl tcg_gen_discard_i32
2626 #define tcg_gen_trunc_tl_i32 tcg_gen_mov_i32
2627 #define tcg_gen_trunc_i64_tl tcg_gen_trunc_i64_i32
2628 #define tcg_gen_extu_i32_tl tcg_gen_mov_i32
2629 #define tcg_gen_ext_i32_tl tcg_gen_mov_i32
2630 #define tcg_gen_extu_tl_i64 tcg_gen_extu_i32_i64
2631 #define tcg_gen_ext_tl_i64 tcg_gen_ext_i32_i64
2632 #define tcg_gen_ext8u_tl tcg_gen_ext8u_i32
2633 #define tcg_gen_ext8s_tl tcg_gen_ext8s_i32
2634 #define tcg_gen_ext16u_tl tcg_gen_ext16u_i32
2635 #define tcg_gen_ext16s_tl tcg_gen_ext16s_i32
2636 #define tcg_gen_ext32u_tl tcg_gen_mov_i32
2637 #define tcg_gen_ext32s_tl tcg_gen_mov_i32
2638 #define tcg_gen_bswap16_tl tcg_gen_bswap16_i32
2639 #define tcg_gen_bswap32_tl tcg_gen_bswap32_i32
2640 #define tcg_gen_concat_tl_i64 tcg_gen_concat_i32_i64
2641 #define tcg_gen_andc_tl tcg_gen_andc_i32
2642 #define tcg_gen_eqv_tl tcg_gen_eqv_i32
2643 #define tcg_gen_nand_tl tcg_gen_nand_i32
2644 #define tcg_gen_nor_tl tcg_gen_nor_i32
2645 #define tcg_gen_orc_tl tcg_gen_orc_i32
2646 #define tcg_gen_rotl_tl tcg_gen_rotl_i32
2647 #define tcg_gen_rotli_tl tcg_gen_rotli_i32
2648 #define tcg_gen_rotr_tl tcg_gen_rotr_i32
2649 #define tcg_gen_rotri_tl tcg_gen_rotri_i32
2650 #define tcg_gen_deposit_tl tcg_gen_deposit_i32
2651 #define tcg_const_tl tcg_const_i32
2652 #define tcg_const_local_tl tcg_const_local_i32
2653 #define tcg_gen_movcond_tl tcg_gen_movcond_i32
2656 #if TCG_TARGET_REG_BITS == 32
2657 #define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i32(TCGV_PTR_TO_NAT(R), \
2658 TCGV_PTR_TO_NAT(A), \
2660 #define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i32(TCGV_PTR_TO_NAT(R), \
2661 TCGV_PTR_TO_NAT(A), (B))
2662 #define tcg_gen_ext_i32_ptr(R, A) tcg_gen_mov_i32(TCGV_PTR_TO_NAT(R), (A))
2663 #else /* TCG_TARGET_REG_BITS == 32 */
2664 #define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i64(TCGV_PTR_TO_NAT(R), \
2665 TCGV_PTR_TO_NAT(A), \
2667 #define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i64(TCGV_PTR_TO_NAT(R), \
2668 TCGV_PTR_TO_NAT(A), (B))
2669 #define tcg_gen_ext_i32_ptr(R, A) tcg_gen_ext_i32_i64(TCGV_PTR_TO_NAT(R), (A))
2670 #endif /* TCG_TARGET_REG_BITS != 32 */