/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "cpu.h"
#include "exec/exec-all.h"
#include "tcg.h"
#include "tcg-op.h"
#include "tcg-mo.h"
#include "trace-tcg.h"
#include "trace/mem.h"

/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif
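/* (The two *_link_error functions are declared but deliberately never
   defined, so any use of TCGV_LOW/TCGV_HIGH that survives into a 64-bit
   build fails at link time rather than miscompiling silently.)  */
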
void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
}

void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
}

void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}

void tcg_gen_mb(TCGBar mb_type)
{
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}

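/* Note: when CF_PARALLEL is clear, this TB is compiled for a context in
   which no other vCPU runs concurrently, so the barrier would have
   nothing to order and is omitted from the opcode stream entirely.  */
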
/* 32 bit ops */

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg1);
        tcg_gen_sub_i32(ret, t0, arg2);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, l);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_setcond_i32(cond, ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_mul_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

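/* For hosts that only provide a double-width divide (div2/divu2, as on
   x86), the expansions below seed the high half of the dividend
   explicitly: replicated sign bits (sari 31) for the signed forms, zero
   for the unsigned ones.  As emitted here, the five operands are
   quotient, remainder, dividend-low, dividend-high, divisor.  */
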
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

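/* The widening fallback above zero-extends the operand, so the 64-bit
   clz sees 32 extra leading zeros; biasing the "value if zero" operand
   by +32 and subtracting 32 from the result cancels that out.  */
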
void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t = tcg_const_i32(arg2);
    tcg_gen_clz_i32(ret, arg1, t);
    tcg_temp_free_i32(t);
}

void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
        tcg_temp_free_i32(z);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}

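/* Worked example of the ctpop-based expansion above, for arg1 = 0x28
   (binary 101000): arg1 - 1 = 100111, so (arg1 - 1) & ~arg1 = 000111,
   which has exactly ctz(arg1) = 3 bits set for ctpop to count.  */
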
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i32 t = tcg_const_i32(arg2);
        tcg_gen_ctz_i32(ret, arg1, t);
        tcg_temp_free_i32(t);
    }
}

void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}

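/* clrsb counts the redundant copies of the sign bit.  XOR-ing the value
   with its own sign mask (arg >> 31) turns those copies into leading
   zeros, so clz of the result, minus one for the sign bit itself, is the
   answer: e.g. arg = 0xfffffff5 gives 0x0000000a, clz = 28, clrsb = 27.  */
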
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}

void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_const_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
        tcg_temp_free_i32(zero);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}

void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}

void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}

void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

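/* The branchless fallback above relies on setcond producing 0 or 1:
   negating that gives an all-zeros or all-ones mask, and
   ret = (v1 & mask) | (v2 & ~mask) then selects between the values.  */
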
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

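/* The 32-bit expansion above uses the identity a_s = a_u - 2^32 * a_31
   (and likewise for b), so a_s * b_s = a_u * b_u
   - 2^32 * (a_31 * b_u + b_31 * a_u) (mod 2^64): the low word of the
   unsigned product is unchanged, and the high word is corrected by
   subtracting b when a is negative and a when b is negative.  */
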
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

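/* The 64-bit multiply below is composed from 32-bit pieces: writing
   each operand as 2^32 * h + l,
       (2^32*h1 + l1) * (2^32*h2 + l2)
           = 2^64*h1*h2 + 2^32*(h1*l2 + h2*l1) + l1*l2
   and the 2^64 term vanishes mod 2^64, leaving one mulu2 plus two
   multiply-accumulates into the high word.  */
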
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg1);
        tcg_gen_sub_i64(ret, t0, arg2);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

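/* For the constant double-word shifts below: when c < 32, the bits that
   cross the word boundary are recovered with the complementary shift by
   32 - c and OR-ed into the other half; shifts of 32 or more simply move
   (and sign- or zero-extend) one word into the other.  */
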
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, l);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_mul_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

/* Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 56);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_clz_i64) {
        tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
    } else {
        gen_helper_clz_i64(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_const_i32((uint32_t)arg2 - 32);
        tcg_gen_clz_i32(t, TCGV_LOW(arg1), t);
        tcg_gen_addi_i32(t, t, 32);
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t = tcg_const_i64(arg2);
        tcg_gen_clz_i64(ret, arg1, t);
        tcg_temp_free_i64(t);
    }
}

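/* The 32-bit path above chains two clz operations: clz of the low word
   is computed first with "value if zero" arg2 - 32, biased by +32, and
   then used as the default for clz of the high word.  A nonzero high
   word wins; otherwise the result is 32 + clz(low), or arg2 when both
   words are zero.  */
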
void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_ctz_i64) {
        tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 z, t = tcg_temp_new_i64();

        if (TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i64(t, arg1, 1);
            tcg_gen_andc_i64(t, t, arg1);
            tcg_gen_ctpop_i64(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
            tcg_gen_neg_i64(t, arg1);
            tcg_gen_and_i64(t, t, arg1);
            tcg_gen_clzi_i64(t, t, 64);
            tcg_gen_xori_i64(t, t, 63);
        }
        z = tcg_const_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i64(t);
        tcg_temp_free_i64(z);
    } else {
        gen_helper_ctz_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_const_i32((uint32_t)arg2 - 32);
        tcg_gen_ctz_i32(t32, TCGV_HIGH(arg1), t32);
        tcg_gen_addi_i32(t32, t32, 32);
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t64 = tcg_const_i64(arg2);
        tcg_gen_ctz_i64(ret, arg1, t64);
        tcg_temp_free_i64(t64);
    }
}

void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_sari_i64(t, arg, 63);
        tcg_gen_xor_i64(t, t, arg);
        tcg_gen_clzi_i64(t, t, 64);
        tcg_gen_subi_i64(ret, t, 1);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}

void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}

void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}

void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}

void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_const_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
        tcg_temp_free_i64(zero);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}

void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
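/* extract returns an unsigned bitfield:
       ret = (arg >> ofs) & ((1 << len) - 1)
   the expansion picks whichever of zero-extension, immediate AND, or a
   shift pair the host supports most cheaply.  */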
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_new_i32();
            /* Extract the bits for the high word normally.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
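/* sextract is the signed counterpart:
       ret = ((int64_t)arg << (64 - ofs - len)) >> (64 - len)
   i.e. the field is moved up to the MSB and arithmetically shifted back
   down, replicating its top bit across the high part of the result.  */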
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
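/* The branch-free fallback above relies on the identity
       ret = (v1 & -cond) | (v2 & ~-cond)
   where setcond produces cond in {0,1} and the negation turns it into an
   all-zeros or all-ones mask.  */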
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
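/* Without a host add2, the carry out of the low half is recovered from
   unsigned wraparound:
       carry = (al + bl) < al
   and folded into the high-half sum.  */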
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
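/* Likewise for sub2, the borrow out of the low half is
       borrow = al < bl   (unsigned)
   and is subtracted from the high-half difference.  */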
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
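/* The mulu2-based expansion uses the identity relating the high halves
   of signed and unsigned double-width products:
       high_s(a, b) = high_u(a, b) - (a < 0 ? b : 0) - (b < 0 ? a : 0)
   the arithmetic right shifts by 63 build the (x < 0) masks.  */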
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
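/* For signed-by-unsigned only the signed operand needs correcting:
       high(a_signed, b_unsigned) = high_u(a, b) - (a < 0 ? b : 0).  */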
/* Size changing operations.  */

void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
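/* On a 64-bit host this computes
       dest = ((uint64_t)high << 32) | (uint32_t)low
   using deposit when the backend supports it.  */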
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
/* QEMU specific operations.  */

void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
void tcg_gen_lookup_and_goto_ptr(void)
{
    if (TCG_TARGET_HAS_goto_ptr && !qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
        TCGv_ptr ptr = tcg_temp_new_ptr();
        gen_helper_lookup_tb_ptr(ptr, cpu_env);
        tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
        tcg_temp_free_ptr(ptr);
    } else {
        tcg_gen_exit_tb(0);
    }
}
static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
{
    /* Trigger the asserts within as early as possible.  */
    (void)get_alignment_bits(op);

    switch (op & MO_SIZE) {
    case MO_8:
        op &= ~MO_BSWAP;
        break;
    case MO_16:
        break;
    case MO_32:
        if (!is64) {
            op &= ~MO_SIGN;
        }
        break;
    case MO_64:
        if (!is64) {
            tcg_abort();
        }
        break;
    }
    if (st) {
        op &= ~MO_SIGN;
    }
    return op;
}
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_i64_arg(addr), oi);
    }
#endif
}
static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_i32_arg(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}
static void tcg_gen_req_mo(TCGBar type)
{
#ifdef TCG_GUEST_DEFAULT_MO
    type &= TCG_GUEST_DEFAULT_MO;
#endif
    type &= ~TCG_TARGET_DEFAULT_MO;
    if (type) {
        tcg_gen_mb(type | TCG_BAR_SC);
    }
}
void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
    memop = tcg_canonicalize_memop(memop, 0, 0);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
}
void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
    memop = tcg_canonicalize_memop(memop, 0, 1);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
}
void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 0);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
}
void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 1);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
}
static void tcg_gen_ext_i32(TCGv_i32 ret, TCGv_i32 val, TCGMemOp opc)
{
    switch (opc & MO_SSIZE) {
    case MO_SB:
        tcg_gen_ext8s_i32(ret, val);
        break;
    case MO_UB:
        tcg_gen_ext8u_i32(ret, val);
        break;
    case MO_SW:
        tcg_gen_ext16s_i32(ret, val);
        break;
    case MO_UW:
        tcg_gen_ext16u_i32(ret, val);
        break;
    default:
        tcg_gen_mov_i32(ret, val);
        break;
    }
}
static void tcg_gen_ext_i64(TCGv_i64 ret, TCGv_i64 val, TCGMemOp opc)
{
    switch (opc & MO_SSIZE) {
    case MO_SB:
        tcg_gen_ext8s_i64(ret, val);
        break;
    case MO_UB:
        tcg_gen_ext8u_i64(ret, val);
        break;
    case MO_SW:
        tcg_gen_ext16s_i64(ret, val);
        break;
    case MO_UW:
        tcg_gen_ext16u_i64(ret, val);
        break;
    case MO_SL:
        tcg_gen_ext32s_i64(ret, val);
        break;
    case MO_UL:
        tcg_gen_ext32u_i64(ret, val);
        break;
    default:
        tcg_gen_mov_i64(ret, val);
        break;
    }
}
#ifdef CONFIG_SOFTMMU
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i64, TCGv_i32);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i32);
#else
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64, TCGv_i64);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64);
#endif
#ifdef CONFIG_ATOMIC64
# define WITH_ATOMIC64(X) X,
#else
# define WITH_ATOMIC64(X)
#endif

static void * const table_cmpxchg[16] = {
    [MO_8] = gen_helper_atomic_cmpxchgb,
    [MO_16 | MO_LE] = gen_helper_atomic_cmpxchgw_le,
    [MO_16 | MO_BE] = gen_helper_atomic_cmpxchgw_be,
    [MO_32 | MO_LE] = gen_helper_atomic_cmpxchgl_le,
    [MO_32 | MO_BE] = gen_helper_atomic_cmpxchgl_be,
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_cmpxchgq_le)
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_cmpxchgq_be)
};
void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
                                TCGv_i32 newv, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);

    if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();

        tcg_gen_ext_i32(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
        tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i32(t2, addr, idx, memop);
        tcg_temp_free_i32(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, t1, memop);
        } else {
            tcg_gen_mov_i32(retv, t1);
        }
        tcg_temp_free_i32(t1);
    } else {
        gen_atomic_cx_i32 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(retv, cpu_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, cpu_env, addr, cmpv, newv);
#endif

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, retv, memop);
        }
    }
}
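/* Note that the serial (!CF_PARALLEL) path above need not be truly atomic:
   without parallel execution no other vCPU can observe the intermediate
   state, so a plain load/movcond/store sequence implements cmpxchg
   faithfully.  */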
void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
                                TCGv_i64 newv, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();

        tcg_gen_ext_i64(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
        tcg_gen_movcond_i64(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i64(t2, addr, idx, memop);
        tcg_temp_free_i64(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, t1, memop);
        } else {
            tcg_gen_mov_i64(retv, t1);
        }
        tcg_temp_free_i64(t1);
    } else if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_cx_i64 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop, idx));
            gen(retv, cpu_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, cpu_env, addr, cmpv, newv);
#endif
#else
        gen_helper_exit_atomic(cpu_env);
        /* Produce a result, so that we have a well-formed opcode stream
           with respect to uses of the result in the (dead) code following.  */
        tcg_gen_movi_i64(retv, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 c32 = tcg_temp_new_i32();
        TCGv_i32 n32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(c32, cmpv);
        tcg_gen_extrl_i64_i32(n32, newv);
        tcg_gen_atomic_cmpxchg_i32(r32, addr, c32, n32, idx, memop & ~MO_SIGN);
        tcg_temp_free_i32(c32);
        tcg_temp_free_i32(n32);

        tcg_gen_extu_i32_i64(retv, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, retv, memop);
        }
    }
}
static void do_nonatomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                                TCGArg idx, TCGMemOp memop, bool new_val,
                                void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();

    memop = tcg_canonicalize_memop(memop, 0, 0);

    tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
    gen(t2, t1, val);
    tcg_gen_qemu_st_i32(t2, addr, idx, memop);

    tcg_gen_ext_i32(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                             TCGArg idx, TCGMemOp memop, void * const table[])
{
    gen_atomic_op_i32 gen;

    memop = tcg_canonicalize_memop(memop, 0, 0);

    gen = table[memop & (MO_SIZE | MO_BSWAP)];
    tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
    {
        TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
        gen(ret, cpu_env, addr, val, oi);
        tcg_temp_free_i32(oi);
    }
#else
    gen(ret, cpu_env, addr, val);
#endif

    if (memop & MO_SIGN) {
        tcg_gen_ext_i32(ret, ret, memop);
    }
}
static void do_nonatomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                                TCGArg idx, TCGMemOp memop, bool new_val,
                                void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();

    memop = tcg_canonicalize_memop(memop, 1, 0);

    tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
    gen(t2, t1, val);
    tcg_gen_qemu_st_i64(t2, addr, idx, memop);

    tcg_gen_ext_i64(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                             TCGArg idx, TCGMemOp memop, void * const table[])
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_op_i64 gen;

        gen = table[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(ret, cpu_env, addr, val, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(ret, cpu_env, addr, val);
#endif
#else
        gen_helper_exit_atomic(cpu_env);
        /* Produce a result, so that we have a well-formed opcode stream
           with respect to uses of the result in the (dead) code following.  */
        tcg_gen_movi_i64(ret, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 v32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(v32, val);
        do_atomic_op_i32(r32, addr, v32, idx, memop & ~MO_SIGN, table);
        tcg_temp_free_i32(v32);

        tcg_gen_extu_i32_i64(ret, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(ret, ret, memop);
        }
    }
}
#define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
static void * const table_##NAME[16] = {                                \
    [MO_8] = gen_helper_atomic_##NAME##b,                               \
    [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
    [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
    [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
    [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
};                                                                      \
void tcg_gen_atomic_##NAME##_i32                                        \
    (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i32);                        \
    }                                                                   \
}                                                                       \
void tcg_gen_atomic_##NAME##_i64                                        \
    (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i64);                        \
    }                                                                   \
}
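/* GEN_ATOMIC_HELPER(NAME, OP, NEW) instantiates tcg_gen_atomic_NAME_i32
   and _i64: with NEW == 0 the old memory value is returned (fetch_add,
   fetch_and, ...); with NEW == 1 the newly computed value (add_fetch, ...).
   xchg below reuses the same machinery via mov2, whose "operation" simply
   replaces the loaded value with VAL.  */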
GEN_ATOMIC_HELPER(fetch_add, add, 0)
GEN_ATOMIC_HELPER(fetch_and, and, 0)
GEN_ATOMIC_HELPER(fetch_or, or, 0)
GEN_ATOMIC_HELPER(fetch_xor, xor, 0)

GEN_ATOMIC_HELPER(add_fetch, add, 1)
GEN_ATOMIC_HELPER(and_fetch, and, 1)
GEN_ATOMIC_HELPER(or_fetch, or, 1)
GEN_ATOMIC_HELPER(xor_fetch, xor, 1)

static void tcg_gen_mov2_i32(TCGv_i32 r, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mov_i32(r, b);
}

static void tcg_gen_mov2_i64(TCGv_i64 r, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mov_i64(r, b);
}

GEN_ATOMIC_HELPER(xchg, mov2, 0)

#undef GEN_ATOMIC_HELPER