/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "tcg/tcg.h"
#include "tcg/tcg-temp-internal.h"
#include "tcg/tcg-op-common.h"
#include "exec/translation-block.h"
#include "exec/plugin-gen.h"
#include "tcg-internal.h"
void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc, 1);
    op->args[0] = a1;
}

void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc, 2);
    op->args[0] = a1;
    op->args[1] = a2;
}

void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc, 3);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc, 4);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc, 5);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc, 6);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}

/* Generic ops.  */

static void add_last_as_label_use(TCGLabel *l)
{
    TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));

    u->op = tcg_last_op();
    QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
}

void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}

void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
/* 32 bit ops */

void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32. */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32. */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}
void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32. */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
        add_last_as_label_use(l);
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it. */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
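/*
 * The ctpop fallback above relies on the identity
 * ctz(x) = ctpop((x - 1) & ~x) for x != 0: subtracting 1 turns the
 * trailing zeros into ones and clears the lowest set bit, and the
 * and-not keeps only those new ones.  E.g. x = 0b01011000 gives
 * (x - 1) & ~x = 0b00000111, whose population count is 3 = ctz(x).
 * The clz fallback isolates the lowest set bit with x & -x and uses
 * ctz(x) = 31 - clz(x & -x), written here as an xor with 31.  Both
 * paths leave the x == 0 case to the final movcond, which substitutes
 * arg2 for that input.
 */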
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
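/*
 * The expansion above computes clrsb as clz(arg ^ (arg >> 31)) - 1:
 * xoring with a copy of the sign bit turns the redundant sign bits
 * into leading zeros, so e.g. 0xffff0000 -> 0x0000ffff -> clz 16,
 * giving 15 sign bits beyond the first.
 */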
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
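/*
 * For reference, the final fallback above implements
 *   deposit(arg1, arg2, ofs, len) = (arg1 & ~(mask << ofs))
 *                                 | ((arg2 & mask) << ofs)
 * with mask = (1 << len) - 1, i.e. a len-bit field taken from arg2
 * replaces bits [ofs, ofs + len) of arg1.  The pre-shifted arg2 only
 * needs the explicit AND when ofs + len < 32, since otherwise the
 * shift itself discards the unwanted high bits.
 */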
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live. */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size. */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u. */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}

void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
/*
 * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
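/*
 * Example: with al = 0x89abcdef, ah = 0x01234567 and ofs = 8, the
 * 64-bit value ah:al = 0x0123456789abcdef shifted right by 8 leaves
 * 0x6789abcd in the low 32 bits; the deposit-based fallback rebuilds
 * exactly that from al >> 8 and the low 8 bits of ah.
 */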
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
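/*
 * The generic expansion above is a branchless select: setcond leaves
 * 0 or 1 in t0, negating that gives an all-zeros or all-ones mask,
 * and ret = (v1 & mask) | (v2 & ~mask) then picks v1 when the
 * condition held and v2 otherwise.
 */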
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        qemu_build_not_reached();
    }
}
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
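/*
 * The 32-bit-host path above derives the signed high part from the
 * unsigned product.  Interpreting a negative arg1 as unsigned adds
 * 2^32 * arg2 to the true product (and likewise for arg2), so the
 * signed high word is hi_u - (arg1 < 0 ? arg2 : 0) - (arg2 < 0 ? arg1 : 0),
 * which is what the sari/and/sub sequence computes.
 */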
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

        tcg_gen_shri_i32(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);
            tcg_gen_sari_i32(t1, t1, 16);
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);
            tcg_gen_shli_i32(t1, t1, 8);
        } else {
            tcg_gen_shli_i32(t1, arg, 8);
        }

        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
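/*
 * Flag summary for the expansion above: TCG_BSWAP_IZ promises that the
 * input is already zero in bits 31:16, letting the ext8u on t0 be
 * skipped; TCG_BSWAP_OS/OZ request that the swapped 16-bit value be
 * sign- or zero-extended into bits 31:16 of the result, otherwise
 * those bits are left unspecified.
 */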
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}

void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
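/*
 * Branchless absolute value: t is 0 for non-negative a and -1 for
 * negative a, so (a ^ t) - t leaves a unchanged in the first case and
 * computes ~a + 1 = -a in the second.
 */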
#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64. */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#if HOST_BIG_ENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}

void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}

void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#if HOST_BIG_ENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}

void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
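/*
 * The schoolbook decomposition used above: with arg1 = ah:al and
 * arg2 = bh:bl, the low 64 bits of the product are
 *   (al * bl) + ((al * bh + ah * bl) << 32)
 * so one full 32x32->64 multiply of the low halves plus two truncated
 * 32x32->32 multiplies folded into the high word are sufficient.
 */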
#else

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}

#endif /* TCG_TARGET_REG_BITS == 32 */
1297 void tcg_gen_addi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1299 /* some cases can be optimized here */
1301 tcg_gen_mov_i64(ret
, arg1
);
1302 } else if (TCG_TARGET_REG_BITS
== 64) {
1303 tcg_gen_add_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1305 tcg_gen_add2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1306 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1307 tcg_constant_i32(arg2
), tcg_constant_i32(arg2
>> 32));
1311 void tcg_gen_subfi_i64(TCGv_i64 ret
, int64_t arg1
, TCGv_i64 arg2
)
1313 if (arg1
== 0 && TCG_TARGET_HAS_neg_i64
) {
1314 /* Don't recurse with tcg_gen_neg_i64. */
1315 tcg_gen_op2_i64(INDEX_op_neg_i64
, ret
, arg2
);
1316 } else if (TCG_TARGET_REG_BITS
== 64) {
1317 tcg_gen_sub_i64(ret
, tcg_constant_i64(arg1
), arg2
);
1319 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1320 tcg_constant_i32(arg1
), tcg_constant_i32(arg1
>> 32),
1321 TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1325 void tcg_gen_subi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1327 /* some cases can be optimized here */
1329 tcg_gen_mov_i64(ret
, arg1
);
1330 } else if (TCG_TARGET_REG_BITS
== 64) {
1331 tcg_gen_sub_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1333 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1334 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1335 tcg_constant_i32(arg2
), tcg_constant_i32(arg2
>> 32));
1339 void tcg_gen_andi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1341 if (TCG_TARGET_REG_BITS
== 32) {
1342 tcg_gen_andi_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1343 tcg_gen_andi_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1347 /* Some cases can be optimized here. */
1350 tcg_gen_movi_i64(ret
, 0);
1353 tcg_gen_mov_i64(ret
, arg1
);
1356 /* Don't recurse with tcg_gen_ext8u_i64. */
1357 if (TCG_TARGET_HAS_ext8u_i64
) {
1358 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg1
);
1363 if (TCG_TARGET_HAS_ext16u_i64
) {
1364 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg1
);
1369 if (TCG_TARGET_HAS_ext32u_i64
) {
1370 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg1
);
1376 tcg_gen_and_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1379 void tcg_gen_ori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1381 if (TCG_TARGET_REG_BITS
== 32) {
1382 tcg_gen_ori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1383 tcg_gen_ori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1386 /* Some cases can be optimized here. */
1388 tcg_gen_movi_i64(ret
, -1);
1389 } else if (arg2
== 0) {
1390 tcg_gen_mov_i64(ret
, arg1
);
1392 tcg_gen_or_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1396 void tcg_gen_xori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1398 if (TCG_TARGET_REG_BITS
== 32) {
1399 tcg_gen_xori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1400 tcg_gen_xori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1403 /* Some cases can be optimized here. */
1405 tcg_gen_mov_i64(ret
, arg1
);
1406 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i64
) {
1407 /* Don't recurse with tcg_gen_not_i64. */
1408 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg1
);
1410 tcg_gen_xor_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1414 static inline void tcg_gen_shifti_i64(TCGv_i64 ret
, TCGv_i64 arg1
,
1415 unsigned c
, bool right
, bool arith
)
1417 tcg_debug_assert(c
< 64);
1419 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
1420 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
1421 } else if (c
>= 32) {
1425 tcg_gen_sari_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1426 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), 31);
1428 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1429 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1432 tcg_gen_shli_i32(TCGV_HIGH(ret
), TCGV_LOW(arg1
), c
);
1433 tcg_gen_movi_i32(TCGV_LOW(ret
), 0);
1436 if (TCG_TARGET_HAS_extract2_i32
) {
1437 tcg_gen_extract2_i32(TCGV_LOW(ret
),
1438 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), c
);
1440 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), c
);
1441 tcg_gen_deposit_i32(TCGV_LOW(ret
), TCGV_LOW(ret
),
1442 TCGV_HIGH(arg1
), 32 - c
, c
);
1445 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), c
);
1447 tcg_gen_shri_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), c
);
1450 if (TCG_TARGET_HAS_extract2_i32
) {
1451 tcg_gen_extract2_i32(TCGV_HIGH(ret
),
1452 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), 32 - c
);
1454 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1455 tcg_gen_shri_i32(t0
, TCGV_LOW(arg1
), 32 - c
);
1456 tcg_gen_deposit_i32(TCGV_HIGH(ret
), t0
,
1457 TCGV_HIGH(arg1
), c
, 32 - c
);
1458 tcg_temp_free_i32(t0
);
1460 tcg_gen_shli_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), c
);
1464 void tcg_gen_shli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1466 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1467 if (TCG_TARGET_REG_BITS
== 32) {
1468 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 0, 0);
1469 } else if (arg2
== 0) {
1470 tcg_gen_mov_i64(ret
, arg1
);
1472 tcg_gen_shl_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1476 void tcg_gen_shri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1478 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1479 if (TCG_TARGET_REG_BITS
== 32) {
1480 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 0);
1481 } else if (arg2
== 0) {
1482 tcg_gen_mov_i64(ret
, arg1
);
1484 tcg_gen_shr_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1488 void tcg_gen_sari_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1490 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1491 if (TCG_TARGET_REG_BITS
== 32) {
1492 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 1);
1493 } else if (arg2
== 0) {
1494 tcg_gen_mov_i64(ret
, arg1
);
1496 tcg_gen_sar_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1500 void tcg_gen_brcond_i64(TCGCond cond
, TCGv_i64 arg1
, TCGv_i64 arg2
, TCGLabel
*l
)
1502 if (cond
== TCG_COND_ALWAYS
) {
1504 } else if (cond
!= TCG_COND_NEVER
) {
1505 if (TCG_TARGET_REG_BITS
== 32) {
1506 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
, TCGV_LOW(arg1
),
1507 TCGV_HIGH(arg1
), TCGV_LOW(arg2
),
1508 TCGV_HIGH(arg2
), cond
, label_arg(l
));
1510 tcg_gen_op4ii_i64(INDEX_op_brcond_i64
, arg1
, arg2
, cond
,
1513 add_last_as_label_use(l
);
1517 void tcg_gen_brcondi_i64(TCGCond cond
, TCGv_i64 arg1
, int64_t arg2
, TCGLabel
*l
)
1519 if (TCG_TARGET_REG_BITS
== 64) {
1520 tcg_gen_brcond_i64(cond
, arg1
, tcg_constant_i64(arg2
), l
);
1521 } else if (cond
== TCG_COND_ALWAYS
) {
1523 } else if (cond
!= TCG_COND_NEVER
) {
1524 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
,
1525 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1526 tcg_constant_i32(arg2
),
1527 tcg_constant_i32(arg2
>> 32),
1528 cond
, label_arg(l
));
1529 add_last_as_label_use(l
);
1533 void tcg_gen_setcond_i64(TCGCond cond
, TCGv_i64 ret
,
1534 TCGv_i64 arg1
, TCGv_i64 arg2
)
1536 if (cond
== TCG_COND_ALWAYS
) {
1537 tcg_gen_movi_i64(ret
, 1);
1538 } else if (cond
== TCG_COND_NEVER
) {
1539 tcg_gen_movi_i64(ret
, 0);
1541 if (TCG_TARGET_REG_BITS
== 32) {
1542 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1543 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1544 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
1545 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1547 tcg_gen_op4i_i64(INDEX_op_setcond_i64
, ret
, arg1
, arg2
, cond
);
1552 void tcg_gen_setcondi_i64(TCGCond cond
, TCGv_i64 ret
,
1553 TCGv_i64 arg1
, int64_t arg2
)
1555 if (TCG_TARGET_REG_BITS
== 64) {
1556 tcg_gen_setcond_i64(cond
, ret
, arg1
, tcg_constant_i64(arg2
));
1557 } else if (cond
== TCG_COND_ALWAYS
) {
1558 tcg_gen_movi_i64(ret
, 1);
1559 } else if (cond
== TCG_COND_NEVER
) {
1560 tcg_gen_movi_i64(ret
, 0);
1562 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1563 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1564 tcg_constant_i32(arg2
),
1565 tcg_constant_i32(arg2
>> 32), cond
);
1566 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1570 void tcg_gen_muli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1573 tcg_gen_movi_i64(ret
, 0);
1574 } else if (is_power_of_2(arg2
)) {
1575 tcg_gen_shli_i64(ret
, arg1
, ctz64(arg2
));
1577 tcg_gen_mul_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1581 void tcg_gen_div_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1583 if (TCG_TARGET_HAS_div_i64
) {
1584 tcg_gen_op3_i64(INDEX_op_div_i64
, ret
, arg1
, arg2
);
1585 } else if (TCG_TARGET_HAS_div2_i64
) {
1586 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1587 tcg_gen_sari_i64(t0
, arg1
, 63);
1588 tcg_gen_op5_i64(INDEX_op_div2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1589 tcg_temp_free_i64(t0
);
1591 gen_helper_div_i64(ret
, arg1
, arg2
);
1595 void tcg_gen_rem_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1597 if (TCG_TARGET_HAS_rem_i64
) {
1598 tcg_gen_op3_i64(INDEX_op_rem_i64
, ret
, arg1
, arg2
);
1599 } else if (TCG_TARGET_HAS_div_i64
) {
1600 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1601 tcg_gen_op3_i64(INDEX_op_div_i64
, t0
, arg1
, arg2
);
1602 tcg_gen_mul_i64(t0
, t0
, arg2
);
1603 tcg_gen_sub_i64(ret
, arg1
, t0
);
1604 tcg_temp_free_i64(t0
);
1605 } else if (TCG_TARGET_HAS_div2_i64
) {
1606 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1607 tcg_gen_sari_i64(t0
, arg1
, 63);
1608 tcg_gen_op5_i64(INDEX_op_div2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1609 tcg_temp_free_i64(t0
);
1611 gen_helper_rem_i64(ret
, arg1
, arg2
);
1615 void tcg_gen_divu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1617 if (TCG_TARGET_HAS_div_i64
) {
1618 tcg_gen_op3_i64(INDEX_op_divu_i64
, ret
, arg1
, arg2
);
1619 } else if (TCG_TARGET_HAS_div2_i64
) {
1620 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1621 tcg_gen_movi_i64(t0
, 0);
1622 tcg_gen_op5_i64(INDEX_op_divu2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1623 tcg_temp_free_i64(t0
);
1625 gen_helper_divu_i64(ret
, arg1
, arg2
);
1629 void tcg_gen_remu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1631 if (TCG_TARGET_HAS_rem_i64
) {
1632 tcg_gen_op3_i64(INDEX_op_remu_i64
, ret
, arg1
, arg2
);
1633 } else if (TCG_TARGET_HAS_div_i64
) {
1634 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1635 tcg_gen_op3_i64(INDEX_op_divu_i64
, t0
, arg1
, arg2
);
1636 tcg_gen_mul_i64(t0
, t0
, arg2
);
1637 tcg_gen_sub_i64(ret
, arg1
, t0
);
1638 tcg_temp_free_i64(t0
);
1639 } else if (TCG_TARGET_HAS_div2_i64
) {
1640 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1641 tcg_gen_movi_i64(t0
, 0);
1642 tcg_gen_op5_i64(INDEX_op_divu2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1643 tcg_temp_free_i64(t0
);
1645 gen_helper_remu_i64(ret
, arg1
, arg2
);
1649 void tcg_gen_ext8s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1651 if (TCG_TARGET_REG_BITS
== 32) {
1652 tcg_gen_ext8s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1653 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1654 } else if (TCG_TARGET_HAS_ext8s_i64
) {
1655 tcg_gen_op2_i64(INDEX_op_ext8s_i64
, ret
, arg
);
1657 tcg_gen_shli_i64(ret
, arg
, 56);
1658 tcg_gen_sari_i64(ret
, ret
, 56);
1662 void tcg_gen_ext16s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1664 if (TCG_TARGET_REG_BITS
== 32) {
1665 tcg_gen_ext16s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1666 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1667 } else if (TCG_TARGET_HAS_ext16s_i64
) {
1668 tcg_gen_op2_i64(INDEX_op_ext16s_i64
, ret
, arg
);
1670 tcg_gen_shli_i64(ret
, arg
, 48);
1671 tcg_gen_sari_i64(ret
, ret
, 48);
1675 void tcg_gen_ext32s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1677 if (TCG_TARGET_REG_BITS
== 32) {
1678 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1679 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1680 } else if (TCG_TARGET_HAS_ext32s_i64
) {
1681 tcg_gen_op2_i64(INDEX_op_ext32s_i64
, ret
, arg
);
1683 tcg_gen_shli_i64(ret
, arg
, 32);
1684 tcg_gen_sari_i64(ret
, ret
, 32);
1688 void tcg_gen_ext8u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1690 if (TCG_TARGET_REG_BITS
== 32) {
1691 tcg_gen_ext8u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1692 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1693 } else if (TCG_TARGET_HAS_ext8u_i64
) {
1694 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg
);
1696 tcg_gen_andi_i64(ret
, arg
, 0xffu
);
1700 void tcg_gen_ext16u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1702 if (TCG_TARGET_REG_BITS
== 32) {
1703 tcg_gen_ext16u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1704 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1705 } else if (TCG_TARGET_HAS_ext16u_i64
) {
1706 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg
);
1708 tcg_gen_andi_i64(ret
, arg
, 0xffffu
);
1712 void tcg_gen_ext32u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1714 if (TCG_TARGET_REG_BITS
== 32) {
1715 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1716 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1717 } else if (TCG_TARGET_HAS_ext32u_i64
) {
1718 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg
);
1720 tcg_gen_andi_i64(ret
, arg
, 0xffffffffu
);
1724 void tcg_gen_bswap16_i64(TCGv_i64 ret
, TCGv_i64 arg
, int flags
)
1726 /* Only one extension flag may be present. */
1727 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
1729 if (TCG_TARGET_REG_BITS
== 32) {
1730 tcg_gen_bswap16_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), flags
);
1731 if (flags
& TCG_BSWAP_OS
) {
1732 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1734 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1736 } else if (TCG_TARGET_HAS_bswap16_i64
) {
1737 tcg_gen_op3i_i64(INDEX_op_bswap16_i64
, ret
, arg
, flags
);
1739 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1740 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1742 tcg_gen_shri_i64(t0
, arg
, 8);
1743 if (!(flags
& TCG_BSWAP_IZ
)) {
1744 tcg_gen_ext8u_i64(t0
, t0
);
1747 if (flags
& TCG_BSWAP_OS
) {
1748 tcg_gen_shli_i64(t1
, arg
, 56);
1749 tcg_gen_sari_i64(t1
, t1
, 48);
1750 } else if (flags
& TCG_BSWAP_OZ
) {
1751 tcg_gen_ext8u_i64(t1
, arg
);
1752 tcg_gen_shli_i64(t1
, t1
, 8);
1754 tcg_gen_shli_i64(t1
, arg
, 8);
1757 tcg_gen_or_i64(ret
, t0
, t1
);
1758 tcg_temp_free_i64(t0
);
1759 tcg_temp_free_i64(t1
);
1763 void tcg_gen_bswap32_i64(TCGv_i64 ret
, TCGv_i64 arg
, int flags
)
1765 /* Only one extension flag may be present. */
1766 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
1768 if (TCG_TARGET_REG_BITS
== 32) {
1769 tcg_gen_bswap32_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1770 if (flags
& TCG_BSWAP_OS
) {
1771 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1773 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1775 } else if (TCG_TARGET_HAS_bswap32_i64
) {
1776 tcg_gen_op3i_i64(INDEX_op_bswap32_i64
, ret
, arg
, flags
);
1778 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1779 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1780 TCGv_i64 t2
= tcg_constant_i64(0x00ff00ff);
1782 /* arg = xxxxabcd */
1783 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .xxxxabc */
1784 tcg_gen_and_i64(t1
, arg
, t2
); /* t1 = .....b.d */
1785 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = .....a.c */
1786 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = ....b.d. */
1787 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ....badc */
1789 tcg_gen_shli_i64(t1
, ret
, 48); /* t1 = dc...... */
1790 tcg_gen_shri_i64(t0
, ret
, 16); /* t0 = ......ba */
1791 if (flags
& TCG_BSWAP_OS
) {
1792 tcg_gen_sari_i64(t1
, t1
, 32); /* t1 = ssssdc.. */
1794 tcg_gen_shri_i64(t1
, t1
, 32); /* t1 = ....dc.. */
1796 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ssssdcba */
1798 tcg_temp_free_i64(t0
);
1799 tcg_temp_free_i64(t1
);
1803 void tcg_gen_bswap64_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1805 if (TCG_TARGET_REG_BITS
== 32) {
1807 t0
= tcg_temp_ebb_new_i32();
1808 t1
= tcg_temp_ebb_new_i32();
1810 tcg_gen_bswap32_i32(t0
, TCGV_LOW(arg
));
1811 tcg_gen_bswap32_i32(t1
, TCGV_HIGH(arg
));
1812 tcg_gen_mov_i32(TCGV_LOW(ret
), t1
);
1813 tcg_gen_mov_i32(TCGV_HIGH(ret
), t0
);
1814 tcg_temp_free_i32(t0
);
1815 tcg_temp_free_i32(t1
);
1816 } else if (TCG_TARGET_HAS_bswap64_i64
) {
1817 tcg_gen_op3i_i64(INDEX_op_bswap64_i64
, ret
, arg
, 0);
1819 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1820 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1821 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
1823 /* arg = abcdefgh */
1824 tcg_gen_movi_i64(t2
, 0x00ff00ff00ff00ffull
);
1825 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .abcdefg */
1826 tcg_gen_and_i64(t1
, arg
, t2
); /* t1 = .b.d.f.h */
1827 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = .a.c.e.g */
1828 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = b.d.f.h. */
1829 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = badcfehg */
1831 tcg_gen_movi_i64(t2
, 0x0000ffff0000ffffull
);
1832 tcg_gen_shri_i64(t0
, ret
, 16); /* t0 = ..badcfe */
1833 tcg_gen_and_i64(t1
, ret
, t2
); /* t1 = ..dc..hg */
1834 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = ..ba..fe */
1835 tcg_gen_shli_i64(t1
, t1
, 16); /* t1 = dc..hg.. */
1836 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = dcbahgfe */
1838 tcg_gen_shri_i64(t0
, ret
, 32); /* t0 = ....dcba */
1839 tcg_gen_shli_i64(t1
, ret
, 32); /* t1 = hgfe.... */
1840 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = hgfedcba */
1842 tcg_temp_free_i64(t0
);
1843 tcg_temp_free_i64(t1
);
1844 tcg_temp_free_i64(t2
);
1848 void tcg_gen_hswap_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1850 uint64_t m
= 0x0000ffff0000ffffull
;
1851 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1852 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1854 /* See include/qemu/bitops.h, hswap64. */
1855 tcg_gen_rotli_i64(t1
, arg
, 32);
1856 tcg_gen_andi_i64(t0
, t1
, m
);
1857 tcg_gen_shli_i64(t0
, t0
, 16);
1858 tcg_gen_shri_i64(t1
, t1
, 16);
1859 tcg_gen_andi_i64(t1
, t1
, m
);
1860 tcg_gen_or_i64(ret
, t0
, t1
);
1862 tcg_temp_free_i64(t0
);
1863 tcg_temp_free_i64(t1
);
1866 void tcg_gen_wswap_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1868 /* Swapping 2 32-bit elements is a rotate. */
1869 tcg_gen_rotli_i64(ret
, arg
, 32);
1872 void tcg_gen_not_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1874 if (TCG_TARGET_REG_BITS
== 32) {
1875 tcg_gen_not_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1876 tcg_gen_not_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1877 } else if (TCG_TARGET_HAS_not_i64
) {
1878 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg
);
1880 tcg_gen_xori_i64(ret
, arg
, -1);
1884 void tcg_gen_andc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1886 if (TCG_TARGET_REG_BITS
== 32) {
1887 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1888 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1889 } else if (TCG_TARGET_HAS_andc_i64
) {
1890 tcg_gen_op3_i64(INDEX_op_andc_i64
, ret
, arg1
, arg2
);
1892 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1893 tcg_gen_not_i64(t0
, arg2
);
1894 tcg_gen_and_i64(ret
, arg1
, t0
);
1895 tcg_temp_free_i64(t0
);
1899 void tcg_gen_eqv_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1901 if (TCG_TARGET_REG_BITS
== 32) {
1902 tcg_gen_eqv_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1903 tcg_gen_eqv_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1904 } else if (TCG_TARGET_HAS_eqv_i64
) {
1905 tcg_gen_op3_i64(INDEX_op_eqv_i64
, ret
, arg1
, arg2
);
1907 tcg_gen_xor_i64(ret
, arg1
, arg2
);
1908 tcg_gen_not_i64(ret
, ret
);
1912 void tcg_gen_nand_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1914 if (TCG_TARGET_REG_BITS
== 32) {
1915 tcg_gen_nand_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1916 tcg_gen_nand_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1917 } else if (TCG_TARGET_HAS_nand_i64
) {
1918 tcg_gen_op3_i64(INDEX_op_nand_i64
, ret
, arg1
, arg2
);
1920 tcg_gen_and_i64(ret
, arg1
, arg2
);
1921 tcg_gen_not_i64(ret
, ret
);
1925 void tcg_gen_nor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1927 if (TCG_TARGET_REG_BITS
== 32) {
1928 tcg_gen_nor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1929 tcg_gen_nor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1930 } else if (TCG_TARGET_HAS_nor_i64
) {
1931 tcg_gen_op3_i64(INDEX_op_nor_i64
, ret
, arg1
, arg2
);
1933 tcg_gen_or_i64(ret
, arg1
, arg2
);
1934 tcg_gen_not_i64(ret
, ret
);
1938 void tcg_gen_orc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1940 if (TCG_TARGET_REG_BITS
== 32) {
1941 tcg_gen_orc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1942 tcg_gen_orc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1943 } else if (TCG_TARGET_HAS_orc_i64
) {
1944 tcg_gen_op3_i64(INDEX_op_orc_i64
, ret
, arg1
, arg2
);
1946 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1947 tcg_gen_not_i64(t0
, arg2
);
1948 tcg_gen_or_i64(ret
, arg1
, t0
);
1949 tcg_temp_free_i64(t0
);
1953 void tcg_gen_clz_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1955 if (TCG_TARGET_HAS_clz_i64
) {
1956 tcg_gen_op3_i64(INDEX_op_clz_i64
, ret
, arg1
, arg2
);
1958 gen_helper_clz_i64(ret
, arg1
, arg2
);
1962 void tcg_gen_clzi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, uint64_t arg2
)
1964 if (TCG_TARGET_REG_BITS
== 32
1965 && TCG_TARGET_HAS_clz_i32
1966 && arg2
<= 0xffffffffu
) {
1967 TCGv_i32 t
= tcg_temp_ebb_new_i32();
1968 tcg_gen_clzi_i32(t
, TCGV_LOW(arg1
), arg2
- 32);
1969 tcg_gen_addi_i32(t
, t
, 32);
1970 tcg_gen_clz_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), t
);
1971 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1972 tcg_temp_free_i32(t
);
1974 tcg_gen_clz_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1978 void tcg_gen_ctz_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1980 if (TCG_TARGET_HAS_ctz_i64
) {
1981 tcg_gen_op3_i64(INDEX_op_ctz_i64
, ret
, arg1
, arg2
);
1982 } else if (TCG_TARGET_HAS_ctpop_i64
|| TCG_TARGET_HAS_clz_i64
) {
1983 TCGv_i64 z
, t
= tcg_temp_ebb_new_i64();
1985 if (TCG_TARGET_HAS_ctpop_i64
) {
1986 tcg_gen_subi_i64(t
, arg1
, 1);
1987 tcg_gen_andc_i64(t
, t
, arg1
);
1988 tcg_gen_ctpop_i64(t
, t
);
1990 /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
1991 tcg_gen_neg_i64(t
, arg1
);
1992 tcg_gen_and_i64(t
, t
, arg1
);
1993 tcg_gen_clzi_i64(t
, t
, 64);
1994 tcg_gen_xori_i64(t
, t
, 63);
1996 z
= tcg_constant_i64(0);
1997 tcg_gen_movcond_i64(TCG_COND_EQ
, ret
, arg1
, z
, arg2
, t
);
1998 tcg_temp_free_i64(t
);
1999 tcg_temp_free_i64(z
);
2001 gen_helper_ctz_i64(ret
, arg1
, arg2
);
2005 void tcg_gen_ctzi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, uint64_t arg2
)
2007 if (TCG_TARGET_REG_BITS
== 32
2008 && TCG_TARGET_HAS_ctz_i32
2009 && arg2
<= 0xffffffffu
) {
2010 TCGv_i32 t32
= tcg_temp_ebb_new_i32();
2011 tcg_gen_ctzi_i32(t32
, TCGV_HIGH(arg1
), arg2
- 32);
2012 tcg_gen_addi_i32(t32
, t32
, 32);
2013 tcg_gen_ctz_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), t32
);
2014 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2015 tcg_temp_free_i32(t32
);
2016 } else if (!TCG_TARGET_HAS_ctz_i64
2017 && TCG_TARGET_HAS_ctpop_i64
2019 /* This equivalence has the advantage of not requiring a fixup. */
2020 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2021 tcg_gen_subi_i64(t
, arg1
, 1);
2022 tcg_gen_andc_i64(t
, t
, arg1
);
2023 tcg_gen_ctpop_i64(ret
, t
);
2024 tcg_temp_free_i64(t
);
2026 tcg_gen_ctz_i64(ret
, arg1
, tcg_constant_i64(arg2
));
2030 void tcg_gen_clrsb_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2032 if (TCG_TARGET_HAS_clz_i64
|| TCG_TARGET_HAS_clz_i32
) {
2033 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2034 tcg_gen_sari_i64(t
, arg
, 63);
2035 tcg_gen_xor_i64(t
, t
, arg
);
2036 tcg_gen_clzi_i64(t
, t
, 64);
2037 tcg_gen_subi_i64(ret
, t
, 1);
2038 tcg_temp_free_i64(t
);
2040 gen_helper_clrsb_i64(ret
, arg
);
2044 void tcg_gen_ctpop_i64(TCGv_i64 ret
, TCGv_i64 arg1
)
2046 if (TCG_TARGET_HAS_ctpop_i64
) {
2047 tcg_gen_op2_i64(INDEX_op_ctpop_i64
, ret
, arg1
);
2048 } else if (TCG_TARGET_REG_BITS
== 32 && TCG_TARGET_HAS_ctpop_i32
) {
2049 tcg_gen_ctpop_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
2050 tcg_gen_ctpop_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
2051 tcg_gen_add_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), TCGV_HIGH(ret
));
2052 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2054 gen_helper_ctpop_i64(ret
, arg1
);
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}

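/*
 * The generic deposit fallback below computes
 *   ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs)
 * with mask = (1 << len) - 1.  When extract2 is available and the field
 * touches bit 0 or bit 63, the insertion is instead synthesized from a
 * funnel shift (plus a rotate for the ofs == 0 case).
 */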
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}

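/*
 * Depositing into an all-zero background reduces to (arg & mask) << ofs.
 * The expansion below prefers the fixed-width zero-extension opcodes over
 * an explicit AND whenever the field width or its end position is 8, 16
 * or 32 bits, on the assumption that those encode more compactly.
 */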
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}

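/*
 * extract is (arg >> ofs) & ((1 << len) - 1).  Without a native extract
 * this becomes either shift-then-AND, when the mask is assumed to encode
 * cheaply, or a left shift that parks the field at the MSB followed by a
 * logical right shift.
 */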
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}

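/*
 * sextract is the signed counterpart: the generic expansion shifts the
 * field up by 64 - len - ofs so that it sits at the MSB, then shifts it
 * back down arithmetically by 64 - len, replicating the field's own sign
 * bit across the upper bits.
 */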
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}

/*
 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}

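/*
 * Without a native movcond, the fallback below materializes the
 * condition with setcond, widens the 0/1 result into an all-zeros or
 * all-ones mask with neg, and selects bitwise:
 *   ret = (v1 & mask) | (v2 & ~mask).
 */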
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

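/*
 * The double-word fallbacks below derive the carry and borrow from
 * unsigned comparisons: for the add, carry = (al + bl) < al; for the
 * sub, borrow = al < bl.  The low half is computed into a temporary so
 * that the outputs may alias the inputs.
 */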
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

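/*
 * When only an unsigned widening multiply is available, the signed high
 * part is recovered with the standard correction
 *   sh = uh - (arg1 < 0 ? arg2 : 0) - (arg2 < 0 ? arg1 : 0),
 * which the sari/and/sub sequence below implements; mulsu2 applies the
 * same correction for its single signed operand.
 */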
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();
    TCGv_i64 t2 = tcg_temp_ebb_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

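/* Min/max are simply movcond with the matching signed or unsigned compare. */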
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}

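/*
 * Branchless absolute value: t = a >> 63 is the sign mask, and
 * abs(a) = (a ^ t) - t, i.e. a conditional complement followed by a
 * conditional increment.
 */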
void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
{
    TCGv_i64 t = tcg_temp_ebb_new_i64();

    tcg_gen_sari_i64(t, a, 63);
    tcg_gen_xor_i64(ret, a, t);
    tcg_gen_sub_i64(ret, ret, t);
    tcg_temp_free_i64(t);
}

/* Size changing operations.  */

void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}

void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}

void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

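/*
 * concat builds dest = (uint64_t)low | ((uint64_t)high << 32): two plain
 * moves on a 32-bit host, otherwise a deposit or a zero-extend/shift/or
 * sequence.
 */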
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}

void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}

void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}

void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
{
    if (dst != src) {
        tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
        tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
    }
}

/* QEMU specific operations.  */

void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}

void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}

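/*
 * Chain to whatever TB matches the current CPU state: the helper looks up
 * the next TB and goto_ptr jumps to the code pointer it returns, which is
 * the epilogue when no TB is found.  With CF_NO_GOTO_PTR this degenerates
 * to a plain exit_tb(NULL, 0).
 */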
void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    gen_helper_lookup_tb_ptr(ptr, cpu_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}