/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "tcg/tcg.h"
#include "tcg/tcg-temp-internal.h"
#include "tcg/tcg-op-common.h"
#include "exec/translation-block.h"
#include "exec/plugin-gen.h"
#include "tcg-internal.h"

void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc, 1);
    op->args[0] = a1;
}

void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc, 2);
    op->args[0] = a1;
    op->args[1] = a2;
}

void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc, 3);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc, 4);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc, 5);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc, 6);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}

/* Generic ops.  */

static void add_last_as_label_use(TCGLabel *l)
{
    TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));

    u->op = tcg_last_op();
    QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
}

void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}

void tcg_gen_mb(TCGBar mb_type)
{
    if (tcg_ctx->gen_tb->cflags & CF_PARALLEL) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
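
/*
 * Editor's note (illustration, not upstream code): a front end requesting
 * a sequentially consistent full barrier would write something like
 *
 *     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
 *
 * The CF_PARALLEL test above means the barrier op is only emitted when the
 * translation block may run concurrently with other vCPUs; a single
 * round-robin vCPU thread needs no host barrier at all.
 */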

/* 32 bit ops */

void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}
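
/*
 * Editor's note (illustration, not upstream code): the switch above
 * canonicalizes common masks: andi with 0 folds to movi 0, andi with -1
 * folds to a plain move, and andi with 0xff or 0xffff becomes a single
 * ext8u/ext16u op when the host supports it.
 */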

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
        add_last_as_label_use(l);
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
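
/*
 * Editor's note (illustration, not upstream code): the strength reduction
 * above rewrites a multiply by a power of two as a shift, e.g. a multiply
 * by 8 becomes shli by 3, since 8 == 1 << 3 and ctz32(8) == 3.
 */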

void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
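
/*
 * Editor's note: the TCG_TARGET_HAS_div_i32 fallback above computes the
 * remainder from the identity a % b == a - (a / b) * b, which holds for
 * C-style truncated division.
 */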

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it. */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
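
/*
 * Editor's note: the ctpop expansion relies on the identity
 * ctz(x) == ctpop((x - 1) & ~x): subtracting 1 turns the trailing zeros
 * into ones and clears the lowest set bit, and the andc keeps exactly
 * those bits.  E.g. x = 0b10100 gives (x - 1) & ~x = 0b00011, whose
 * population count is 2 == ctz(x).  With x == 0 the result is 32, which
 * is why the arg2 == 32 case needs no fixup.
 */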

void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
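
/*
 * Editor's note: clrsb counts the leading redundant sign bits.  XORing
 * the value with its sign mask (arg >> 31) maps both positive and
 * negative inputs onto a count of leading zeros, and the final subi 1
 * discards the sign bit itself from the count.
 */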

void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
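
/*
 * Editor's note (illustration, not upstream code): in the generic
 * mask-and-shift fallback, depositing an 8-bit field at ofs 4 computes
 *
 *     ret = (arg1 & ~(0xff << 4)) | ((arg2 & 0xff) << 4)
 *
 * i.e. the field is cleared in arg1 and replaced with the low bits of
 * arg2.
 */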

void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live. */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size. */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}

void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u. */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}

void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}

/*
 * Extract 32 bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
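
/*
 * Editor's note (illustration, not upstream code): extract2 views ah:al
 * as a single 64-bit value and returns bits [ofs, ofs + 32).  E.g. with
 * al = 0x11223344, ah = 0xaabbccdd and ofs = 8, the result is
 * 0xdd112233: the low 8 bits of ah glued above the high 24 bits of al.
 */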

void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
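
/*
 * Editor's note: the fallback above is the classic branchless select.
 * setcond produces 0 or 1; negating that gives a mask of all zeros or
 * all ones, so (v1 & mask) | (v2 & ~mask) picks v1 when the condition
 * holds and v2 otherwise.
 */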

void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        qemu_build_not_reached();
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
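
/*
 * Editor's note: the 32-bit path derives the signed high part from the
 * unsigned product.  Interpreting a negative arg1 as unsigned adds
 * 2^32 * arg2 to the true product (and symmetrically for arg2), so the
 * high word is corrected by subtracting arg2 when arg1 < 0 and arg1 when
 * arg2 < 0; the (arg >> 31) masks select exactly those corrections.
 */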

void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

        tcg_gen_shri_i32(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);
            tcg_gen_sari_i32(t1, t1, 16);
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);
            tcg_gen_shli_i32(t1, t1, 8);
        } else {
            tcg_gen_shli_i32(t1, arg, 8);
        }

        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
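
/*
 * Editor's note (illustration, not upstream code): tracing the fallback
 * with arg = 0x11223344 ('abcd'): the first round swaps the bytes within
 * each 16-bit half, giving 0x22114433 ('badc'), and the final 16-bit
 * rotate built from the two shifts yields 0x44332211 ('dcba').
 */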

void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}

void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}

void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
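
/*
 * Editor's note: the abs expansion uses the standard two's-complement
 * trick.  With s = a >> 31 (0 or -1), (a ^ s) - s equals a for a >= 0
 * and ~a + 1 == -a for a < 0, all without a branch.
 */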

/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64. */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#if HOST_BIG_ENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}

void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}

void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#if HOST_BIG_ENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}

void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}

#else

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}

#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64. */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64. */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64. */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
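
/*
 * Editor's note (illustration, not upstream code): on a 32-bit host a
 * 64-bit shift by a constant splits into half-word operations.  E.g. a
 * left shift by 40 (c >= 32) is simply high = low << 8, low = 0, while a
 * left shift by c < 32 must funnel the top c bits of the low word into
 * the high word, which is what the extract2/deposit sequences above do.
 */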

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
        add_last_as_label_use(l);
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
        add_last_as_label_use(l);
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

        tcg_gen_shri_i64(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i64(t0, t0);
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i64(t1, arg, 56);
            tcg_gen_sari_i64(t1, t1, 48);
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);
            tcg_gen_shli_i64(t1, t1, 8);
        } else {
            tcg_gen_shli_i64(t1, arg, 8);
        }

        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}

void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    uint64_t m = 0x0000ffff0000ffffull;
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();

    /* See include/qemu/bitops.h, hswap64. */
    tcg_gen_rotli_i64(t1, arg, 32);
    tcg_gen_andi_i64(t0, t1, m);
    tcg_gen_shli_i64(t0, t0, 16);
    tcg_gen_shri_i64(t1, t1, 16);
    tcg_gen_andi_i64(t1, t1, m);
    tcg_gen_or_i64(ret, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Swapping 2 32-bit elements is a rotate. */
    tcg_gen_rotli_i64(ret, arg, 32);
}
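
/*
 * Editor's note (illustration, not upstream code): hswap reverses the
 * four 16-bit elements, e.g. 0x0011223344556677 -> 0x6677445522330011,
 * while wswap reverses the two 32-bit halves, which is exactly a rotate
 * by 32.
 */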

void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_clz_i64) {
        tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
    } else {
        gen_helper_clz_i64(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
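
/*
 * Editor's note: the 32-bit expansion above leans on clz_i32's second
 * operand being the value returned when the input is zero.  The inner
 * clzi of the low word uses arg2 - 32 so that, after the addi of 32, a
 * zero high word falls through to 32 + clz(low), and a fully zero input
 * still yields the caller's arg2 (the subtraction and addition cancel
 * modulo 2^32).
 */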

void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_ctz_i64) {
        tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 z, t = tcg_temp_ebb_new_i64();

        if (TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i64(t, arg1, 1);
            tcg_gen_andc_i64(t, t, arg1);
            tcg_gen_ctpop_i64(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
            tcg_gen_neg_i64(t, arg1);
            tcg_gen_and_i64(t, t, arg1);
            tcg_gen_clzi_i64(t, t, 64);
            tcg_gen_xori_i64(t, t, 63);
        }
        z = tcg_constant_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i64(t);
        tcg_temp_free_i64(z);
    } else {
        gen_helper_ctz_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t, arg, 63);
        tcg_gen_xor_i64(t, t, arg);
        tcg_gen_clzi_i64(t, t, 64);
        tcg_gen_subi_i64(ret, t, 1);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}

void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}
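/*
 * Deposit inserts the low LEN bits of arg2 into arg1 at offset OFS.
 * With mask = (1 << len) - 1, the generic fallback computes
 * ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs); e.g.
 * depositing 0xab into 0x1122334455667788 at ofs 8, len 8 yields
 * 0x112233445566ab88.
 */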
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
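/*
 * deposit_z deposits into an implicit zero: ret = (arg & mask) << ofs.
 * When ofs + len == 64 the left shift alone discards the unwanted
 * high bits, and when ofs == 0 the mask alone suffices; the remaining
 * cases pair a zero-extension or AND with a shift.
 */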
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
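/*
 * Unsigned bitfield extract: ret = (arg >> ofs) & ((1 << len) - 1).
 * When neither an extract opcode nor a cheap zero-extension applies,
 * the final fallback uses a shift pair: shift left by 64 - len - ofs
 * to park the field at the MSB, then shift right by 64 - len to pull
 * it back down zero-filled.
 */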
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
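/*
 * Signed bitfield extract mirrors the unsigned version with
 * arithmetic shifts: the shift-pair fallback is
 * (arg << (64 - len - ofs)) >> (64 - len), where the down shift is
 * sari, replicating the field's top bit into the high bits.
 */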
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
/*
 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}
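/*
 * Fallback movcond: setcond produces 0 or 1, negation widens that to
 * an all-zeros or all-ones mask, and ret = (v1 & mask) | (v2 & ~mask)
 * selects between the two values without a branch.
 */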
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
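/*
 * Double-word add: the carry out of the low half is recovered as
 * (al + bl) < al (unsigned), then added into the high half.  The low
 * result is staged in a temporary so that rl may alias any input.
 */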
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
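/*
 * Double-word subtract: the borrow out of the low half is al < bl
 * (unsigned), subtracted from the high half after ah - bh.
 */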
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
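/*
 * Signed double-word product from the unsigned one: interpreting x
 * as signed means x_s = x_u - 2^64 * sign(x), so the high word is
 * (a * b)_unsigned_high - (a < 0 ? b : 0) - (b < 0 ? a : 0).
 * The sari/and pairs below build those two correction terms.
 */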
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();
    TCGv_i64 t2 = tcg_temp_ebb_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
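/*
 * Branchless abs: t = a >> 63 is 0 for non-negative a and -1
 * otherwise, so (a ^ t) - t either leaves a unchanged or computes
 * ~a + 1, the two's complement negation.
 */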
void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
{
    TCGv_i64 t = tcg_temp_ebb_new_i64();

    tcg_gen_sari_i64(t, a, 63);
    tcg_gen_xor_i64(ret, a, t);
    tcg_gen_sub_i64(ret, ret, t);
    tcg_temp_free_i64(t);
}
/* Size changing operations.  */

void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
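/*
 * The (TCGv_i32)arg casts in the fallbacks here reinterpret the
 * 64-bit temporary as a 32-bit one, so the mov_i32 copies its low
 * half; extrh first shifts the high half down into that position.
 */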
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}

void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}

void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
{
    if (dst != src) {
        tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
        tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
    }
}
/* QEMU specific operations.  */

void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    gen_helper_lookup_tb_ptr(ptr, cpu_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}