/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "tcg/tcg.h"
#include "tcg/tcg-temp-internal.h"
#include "tcg/tcg-op-common.h"
#include "exec/translation-block.h"
#include "exec/plugin-gen.h"
#include "tcg-internal.h"
void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc, 1);
    op->args[0] = a1;
}

void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc, 2);
    op->args[0] = a1;
    op->args[1] = a2;
}

void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc, 3);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc, 4);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc, 5);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc, 6);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}
static void add_last_as_label_use(TCGLabel *l)
{
    TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));

    u->op = tcg_last_op();
    QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
}

void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}
void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32. */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32. */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32. */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
        add_last_as_label_use(l);
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}
void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
                            TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i32) {
        tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
    } else {
        tcg_gen_setcond_i32(cond, ret, arg1, arg2);
        tcg_gen_neg_i32(ret, ret);
    }
}
void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
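/*
 * Count leading zeros; arg2 is the value to return when arg1 is zero.
 * Without a native 32-bit clz, widen to 64 bits: bias the zero-input
 * result by 32 so that clz64 of the zero-extended value, minus 32,
 * gives the 32-bit answer.
 */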
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}
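/*
 * Count trailing zeros; arg2 is the value to return when arg1 is zero.
 * The ctpop fallback uses ctz(x) == ctpop((x - 1) & ~x); the clz fallback
 * isolates the lowest set bit with x & -x and converts the leading-zero
 * count with an xor against 31.
 */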
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it. */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
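/*
 * Count leading redundant sign bits: xor the value with its own sign
 * mask so the problem becomes a leading-zero count of a non-negative
 * number, then subtract 1 to exclude the sign bit itself.
 */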
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}
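/*
 * Deposit the low LEN bits of arg2 into arg1 at position OFS.  Without a
 * native deposit op, fall back to extract2 tricks where possible, else
 * mask arg2 to LEN bits, shift it into place, clear that field in arg1
 * and OR the two halves together.
 */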
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live. */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size. */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u. */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
/*
 * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
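/*
 * Without a native movcond, build a bit mask from the comparison:
 * t0 = -(c1 cond c2), then select with ret = (v1 & t0) | (v2 & ~t0).
 */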
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_negsetcond_i32(cond, t0, c1, c2);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        qemu_build_not_reached();
    }
}
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
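/*
 * Signed-by-unsigned multiply.  On 32-bit hosts, start from the unsigned
 * 64-bit product and correct the high half: when arg1 is negative the
 * unsigned product is arg2 * 2^32 too large, so subtract arg2 from rh.
 */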
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
/*
 * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
 *
 * Byte pattern: xxab -> yyba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

        /* arg = ..ab (IZ) xxab (!IZ) */
        tcg_gen_shri_i32(t0, arg, 8);           /*  t0 = ...a (IZ) .xxa (!IZ) */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);          /*  t0 = ...a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);      /*  t1 = b... */
            tcg_gen_sari_i32(t1, t1, 16);       /*  t1 = ssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);         /*  t1 = ...b */
            tcg_gen_shli_i32(t1, t1, 8);        /*  t1 = ..b. */
        } else {
            tcg_gen_shli_i32(t1, arg, 8);       /*  t1 = xab. */
        }

        tcg_gen_or_i32(ret, t0, t1);            /* ret = ..ba (OZ) */
                                                /*     = ssba (OS) */
                                                /*     = xaba (no flag) */
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
/*
 * bswap32_i32: 32-bit byte swap on a 32-bit value.
 *
 * Byte pattern: abcd -> dcba
 */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

/*
 * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
 *
 * Byte pattern: abcd -> cdab
 */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
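/*
 * Absolute value via the sign-mask trick: with t = a >> 31 (0 or -1),
 * (a ^ t) - t yields a for non-negative inputs and -a for negative ones.
 */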
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64. */
1157 void tcg_gen_discard_i64(TCGv_i64 arg
)
1159 tcg_gen_discard_i32(TCGV_LOW(arg
));
1160 tcg_gen_discard_i32(TCGV_HIGH(arg
));
1163 void tcg_gen_mov_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1165 TCGTemp
*ts
= tcgv_i64_temp(arg
);
1167 /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
1168 if (ts
->kind
== TEMP_CONST
) {
1169 tcg_gen_movi_i64(ret
, ts
->val
);
1171 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1172 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1176 void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
1178 tcg_gen_movi_i32(TCGV_LOW(ret
), arg
);
1179 tcg_gen_movi_i32(TCGV_HIGH(ret
), arg
>> 32);
1182 void tcg_gen_ld8u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1184 tcg_gen_ld8u_i32(TCGV_LOW(ret
), arg2
, offset
);
1185 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1188 void tcg_gen_ld8s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1190 tcg_gen_ld8s_i32(TCGV_LOW(ret
), arg2
, offset
);
1191 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1194 void tcg_gen_ld16u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1196 tcg_gen_ld16u_i32(TCGV_LOW(ret
), arg2
, offset
);
1197 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1200 void tcg_gen_ld16s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1202 tcg_gen_ld16s_i32(TCGV_LOW(ret
), arg2
, offset
);
1203 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1206 void tcg_gen_ld32u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1208 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1209 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1212 void tcg_gen_ld32s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1214 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1215 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1218 void tcg_gen_ld_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1220 /* Since arg2 and ret have different types,
1221 they cannot be the same temporary */
1223 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
);
1224 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
+ 4);
1226 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1227 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
+ 4);
1231 void tcg_gen_st8_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1233 tcg_gen_st8_i32(TCGV_LOW(arg1
), arg2
, offset
);
1236 void tcg_gen_st16_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1238 tcg_gen_st16_i32(TCGV_LOW(arg1
), arg2
, offset
);
1241 void tcg_gen_st32_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1243 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
1246 void tcg_gen_st_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1249 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
);
1250 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
+ 4);
1252 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
1253 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
+ 4);
1257 void tcg_gen_add_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1259 tcg_gen_add2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
), TCGV_LOW(arg1
),
1260 TCGV_HIGH(arg1
), TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1263 void tcg_gen_sub_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1265 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
), TCGV_LOW(arg1
),
1266 TCGV_HIGH(arg1
), TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1269 void tcg_gen_and_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1271 tcg_gen_and_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1272 tcg_gen_and_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1275 void tcg_gen_or_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1277 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1278 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1281 void tcg_gen_xor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1283 tcg_gen_xor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1284 tcg_gen_xor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1287 void tcg_gen_shl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1289 gen_helper_shl_i64(ret
, arg1
, arg2
);
1292 void tcg_gen_shr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1294 gen_helper_shr_i64(ret
, arg1
, arg2
);
1297 void tcg_gen_sar_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1299 gen_helper_sar_i64(ret
, arg1
, arg2
);
1302 void tcg_gen_mul_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1307 t0
= tcg_temp_ebb_new_i64();
1308 t1
= tcg_temp_ebb_new_i32();
1310 tcg_gen_mulu2_i32(TCGV_LOW(t0
), TCGV_HIGH(t0
),
1311 TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1313 tcg_gen_mul_i32(t1
, TCGV_LOW(arg1
), TCGV_HIGH(arg2
));
1314 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
1315 tcg_gen_mul_i32(t1
, TCGV_HIGH(arg1
), TCGV_LOW(arg2
));
1316 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
1318 tcg_gen_mov_i64(ret
, t0
);
1319 tcg_temp_free_i64(t0
);
1320 tcg_temp_free_i32(t1
);
1325 void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
1327 tcg_gen_mov_i64(ret
, tcg_constant_i64(arg
));
1330 #endif /* TCG_TARGET_REG_SIZE == 32 */
1332 void tcg_gen_addi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1334 /* some cases can be optimized here */
1336 tcg_gen_mov_i64(ret
, arg1
);
1337 } else if (TCG_TARGET_REG_BITS
== 64) {
1338 tcg_gen_add_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1340 tcg_gen_add2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1341 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1342 tcg_constant_i32(arg2
), tcg_constant_i32(arg2
>> 32));
1346 void tcg_gen_subfi_i64(TCGv_i64 ret
, int64_t arg1
, TCGv_i64 arg2
)
1348 if (arg1
== 0 && TCG_TARGET_HAS_neg_i64
) {
1349 /* Don't recurse with tcg_gen_neg_i64. */
1350 tcg_gen_op2_i64(INDEX_op_neg_i64
, ret
, arg2
);
1351 } else if (TCG_TARGET_REG_BITS
== 64) {
1352 tcg_gen_sub_i64(ret
, tcg_constant_i64(arg1
), arg2
);
1354 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1355 tcg_constant_i32(arg1
), tcg_constant_i32(arg1
>> 32),
1356 TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1360 void tcg_gen_subi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1362 /* some cases can be optimized here */
1364 tcg_gen_mov_i64(ret
, arg1
);
1365 } else if (TCG_TARGET_REG_BITS
== 64) {
1366 tcg_gen_sub_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1368 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1369 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1370 tcg_constant_i32(arg2
), tcg_constant_i32(arg2
>> 32));
1374 void tcg_gen_andi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1376 if (TCG_TARGET_REG_BITS
== 32) {
1377 tcg_gen_andi_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1378 tcg_gen_andi_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1382 /* Some cases can be optimized here. */
1385 tcg_gen_movi_i64(ret
, 0);
1388 tcg_gen_mov_i64(ret
, arg1
);
1391 /* Don't recurse with tcg_gen_ext8u_i64. */
1392 if (TCG_TARGET_HAS_ext8u_i64
) {
1393 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg1
);
1398 if (TCG_TARGET_HAS_ext16u_i64
) {
1399 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg1
);
1404 if (TCG_TARGET_HAS_ext32u_i64
) {
1405 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg1
);
1411 tcg_gen_and_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1414 void tcg_gen_ori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1416 if (TCG_TARGET_REG_BITS
== 32) {
1417 tcg_gen_ori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1418 tcg_gen_ori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1421 /* Some cases can be optimized here. */
1423 tcg_gen_movi_i64(ret
, -1);
1424 } else if (arg2
== 0) {
1425 tcg_gen_mov_i64(ret
, arg1
);
1427 tcg_gen_or_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1431 void tcg_gen_xori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1433 if (TCG_TARGET_REG_BITS
== 32) {
1434 tcg_gen_xori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1435 tcg_gen_xori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1438 /* Some cases can be optimized here. */
1440 tcg_gen_mov_i64(ret
, arg1
);
1441 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i64
) {
1442 /* Don't recurse with tcg_gen_not_i64. */
1443 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg1
);
1445 tcg_gen_xor_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1449 static inline void tcg_gen_shifti_i64(TCGv_i64 ret
, TCGv_i64 arg1
,
1450 unsigned c
, bool right
, bool arith
)
1452 tcg_debug_assert(c
< 64);
1454 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
1455 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
1456 } else if (c
>= 32) {
1460 tcg_gen_sari_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1461 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), 31);
1463 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1464 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1467 tcg_gen_shli_i32(TCGV_HIGH(ret
), TCGV_LOW(arg1
), c
);
1468 tcg_gen_movi_i32(TCGV_LOW(ret
), 0);
1471 if (TCG_TARGET_HAS_extract2_i32
) {
1472 tcg_gen_extract2_i32(TCGV_LOW(ret
),
1473 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), c
);
1475 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), c
);
1476 tcg_gen_deposit_i32(TCGV_LOW(ret
), TCGV_LOW(ret
),
1477 TCGV_HIGH(arg1
), 32 - c
, c
);
1480 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), c
);
1482 tcg_gen_shri_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), c
);
1485 if (TCG_TARGET_HAS_extract2_i32
) {
1486 tcg_gen_extract2_i32(TCGV_HIGH(ret
),
1487 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), 32 - c
);
1489 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1490 tcg_gen_shri_i32(t0
, TCGV_LOW(arg1
), 32 - c
);
1491 tcg_gen_deposit_i32(TCGV_HIGH(ret
), t0
,
1492 TCGV_HIGH(arg1
), c
, 32 - c
);
1493 tcg_temp_free_i32(t0
);
1495 tcg_gen_shli_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), c
);
1499 void tcg_gen_shli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1501 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1502 if (TCG_TARGET_REG_BITS
== 32) {
1503 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 0, 0);
1504 } else if (arg2
== 0) {
1505 tcg_gen_mov_i64(ret
, arg1
);
1507 tcg_gen_shl_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1511 void tcg_gen_shri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1513 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1514 if (TCG_TARGET_REG_BITS
== 32) {
1515 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 0);
1516 } else if (arg2
== 0) {
1517 tcg_gen_mov_i64(ret
, arg1
);
1519 tcg_gen_shr_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1523 void tcg_gen_sari_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1525 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1526 if (TCG_TARGET_REG_BITS
== 32) {
1527 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 1);
1528 } else if (arg2
== 0) {
1529 tcg_gen_mov_i64(ret
, arg1
);
1531 tcg_gen_sar_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1535 void tcg_gen_brcond_i64(TCGCond cond
, TCGv_i64 arg1
, TCGv_i64 arg2
, TCGLabel
*l
)
1537 if (cond
== TCG_COND_ALWAYS
) {
1539 } else if (cond
!= TCG_COND_NEVER
) {
1540 if (TCG_TARGET_REG_BITS
== 32) {
1541 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
, TCGV_LOW(arg1
),
1542 TCGV_HIGH(arg1
), TCGV_LOW(arg2
),
1543 TCGV_HIGH(arg2
), cond
, label_arg(l
));
1545 tcg_gen_op4ii_i64(INDEX_op_brcond_i64
, arg1
, arg2
, cond
,
1548 add_last_as_label_use(l
);
1552 void tcg_gen_brcondi_i64(TCGCond cond
, TCGv_i64 arg1
, int64_t arg2
, TCGLabel
*l
)
1554 if (TCG_TARGET_REG_BITS
== 64) {
1555 tcg_gen_brcond_i64(cond
, arg1
, tcg_constant_i64(arg2
), l
);
1556 } else if (cond
== TCG_COND_ALWAYS
) {
1558 } else if (cond
!= TCG_COND_NEVER
) {
1559 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
,
1560 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1561 tcg_constant_i32(arg2
),
1562 tcg_constant_i32(arg2
>> 32),
1563 cond
, label_arg(l
));
1564 add_last_as_label_use(l
);
1568 void tcg_gen_setcond_i64(TCGCond cond
, TCGv_i64 ret
,
1569 TCGv_i64 arg1
, TCGv_i64 arg2
)
1571 if (cond
== TCG_COND_ALWAYS
) {
1572 tcg_gen_movi_i64(ret
, 1);
1573 } else if (cond
== TCG_COND_NEVER
) {
1574 tcg_gen_movi_i64(ret
, 0);
1576 if (TCG_TARGET_REG_BITS
== 32) {
1577 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1578 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1579 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
1580 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1582 tcg_gen_op4i_i64(INDEX_op_setcond_i64
, ret
, arg1
, arg2
, cond
);
1587 void tcg_gen_setcondi_i64(TCGCond cond
, TCGv_i64 ret
,
1588 TCGv_i64 arg1
, int64_t arg2
)
1590 if (TCG_TARGET_REG_BITS
== 64) {
1591 tcg_gen_setcond_i64(cond
, ret
, arg1
, tcg_constant_i64(arg2
));
1592 } else if (cond
== TCG_COND_ALWAYS
) {
1593 tcg_gen_movi_i64(ret
, 1);
1594 } else if (cond
== TCG_COND_NEVER
) {
1595 tcg_gen_movi_i64(ret
, 0);
1597 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1598 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1599 tcg_constant_i32(arg2
),
1600 tcg_constant_i32(arg2
>> 32), cond
);
1601 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1605 void tcg_gen_negsetcond_i64(TCGCond cond
, TCGv_i64 ret
,
1606 TCGv_i64 arg1
, TCGv_i64 arg2
)
1608 if (cond
== TCG_COND_ALWAYS
) {
1609 tcg_gen_movi_i64(ret
, -1);
1610 } else if (cond
== TCG_COND_NEVER
) {
1611 tcg_gen_movi_i64(ret
, 0);
1612 } else if (TCG_TARGET_HAS_negsetcond_i64
) {
1613 tcg_gen_op4i_i64(INDEX_op_negsetcond_i64
, ret
, arg1
, arg2
, cond
);
1614 } else if (TCG_TARGET_REG_BITS
== 32) {
1615 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1616 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1617 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
1618 tcg_gen_neg_i32(TCGV_LOW(ret
), TCGV_LOW(ret
));
1619 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
));
1621 tcg_gen_setcond_i64(cond
, ret
, arg1
, arg2
);
1622 tcg_gen_neg_i64(ret
, ret
);
1626 void tcg_gen_muli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1629 tcg_gen_movi_i64(ret
, 0);
1630 } else if (is_power_of_2(arg2
)) {
1631 tcg_gen_shli_i64(ret
, arg1
, ctz64(arg2
));
1633 tcg_gen_mul_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1637 void tcg_gen_div_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1639 if (TCG_TARGET_HAS_div_i64
) {
1640 tcg_gen_op3_i64(INDEX_op_div_i64
, ret
, arg1
, arg2
);
1641 } else if (TCG_TARGET_HAS_div2_i64
) {
1642 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1643 tcg_gen_sari_i64(t0
, arg1
, 63);
1644 tcg_gen_op5_i64(INDEX_op_div2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1645 tcg_temp_free_i64(t0
);
1647 gen_helper_div_i64(ret
, arg1
, arg2
);
1651 void tcg_gen_rem_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1653 if (TCG_TARGET_HAS_rem_i64
) {
1654 tcg_gen_op3_i64(INDEX_op_rem_i64
, ret
, arg1
, arg2
);
1655 } else if (TCG_TARGET_HAS_div_i64
) {
1656 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1657 tcg_gen_op3_i64(INDEX_op_div_i64
, t0
, arg1
, arg2
);
1658 tcg_gen_mul_i64(t0
, t0
, arg2
);
1659 tcg_gen_sub_i64(ret
, arg1
, t0
);
1660 tcg_temp_free_i64(t0
);
1661 } else if (TCG_TARGET_HAS_div2_i64
) {
1662 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1663 tcg_gen_sari_i64(t0
, arg1
, 63);
1664 tcg_gen_op5_i64(INDEX_op_div2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1665 tcg_temp_free_i64(t0
);
1667 gen_helper_rem_i64(ret
, arg1
, arg2
);
1671 void tcg_gen_divu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1673 if (TCG_TARGET_HAS_div_i64
) {
1674 tcg_gen_op3_i64(INDEX_op_divu_i64
, ret
, arg1
, arg2
);
1675 } else if (TCG_TARGET_HAS_div2_i64
) {
1676 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1677 tcg_gen_movi_i64(t0
, 0);
1678 tcg_gen_op5_i64(INDEX_op_divu2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1679 tcg_temp_free_i64(t0
);
1681 gen_helper_divu_i64(ret
, arg1
, arg2
);
1685 void tcg_gen_remu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1687 if (TCG_TARGET_HAS_rem_i64
) {
1688 tcg_gen_op3_i64(INDEX_op_remu_i64
, ret
, arg1
, arg2
);
1689 } else if (TCG_TARGET_HAS_div_i64
) {
1690 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1691 tcg_gen_op3_i64(INDEX_op_divu_i64
, t0
, arg1
, arg2
);
1692 tcg_gen_mul_i64(t0
, t0
, arg2
);
1693 tcg_gen_sub_i64(ret
, arg1
, t0
);
1694 tcg_temp_free_i64(t0
);
1695 } else if (TCG_TARGET_HAS_div2_i64
) {
1696 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1697 tcg_gen_movi_i64(t0
, 0);
1698 tcg_gen_op5_i64(INDEX_op_divu2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1699 tcg_temp_free_i64(t0
);
1701 gen_helper_remu_i64(ret
, arg1
, arg2
);
1705 void tcg_gen_ext8s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1707 if (TCG_TARGET_REG_BITS
== 32) {
1708 tcg_gen_ext8s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1709 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1710 } else if (TCG_TARGET_HAS_ext8s_i64
) {
1711 tcg_gen_op2_i64(INDEX_op_ext8s_i64
, ret
, arg
);
1713 tcg_gen_shli_i64(ret
, arg
, 56);
1714 tcg_gen_sari_i64(ret
, ret
, 56);
1718 void tcg_gen_ext16s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1720 if (TCG_TARGET_REG_BITS
== 32) {
1721 tcg_gen_ext16s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1722 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1723 } else if (TCG_TARGET_HAS_ext16s_i64
) {
1724 tcg_gen_op2_i64(INDEX_op_ext16s_i64
, ret
, arg
);
1726 tcg_gen_shli_i64(ret
, arg
, 48);
1727 tcg_gen_sari_i64(ret
, ret
, 48);
1731 void tcg_gen_ext32s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1733 if (TCG_TARGET_REG_BITS
== 32) {
1734 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1735 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1736 } else if (TCG_TARGET_HAS_ext32s_i64
) {
1737 tcg_gen_op2_i64(INDEX_op_ext32s_i64
, ret
, arg
);
1739 tcg_gen_shli_i64(ret
, arg
, 32);
1740 tcg_gen_sari_i64(ret
, ret
, 32);
1744 void tcg_gen_ext8u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1746 if (TCG_TARGET_REG_BITS
== 32) {
1747 tcg_gen_ext8u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1748 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1749 } else if (TCG_TARGET_HAS_ext8u_i64
) {
1750 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg
);
1752 tcg_gen_andi_i64(ret
, arg
, 0xffu
);
1756 void tcg_gen_ext16u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1758 if (TCG_TARGET_REG_BITS
== 32) {
1759 tcg_gen_ext16u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1760 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1761 } else if (TCG_TARGET_HAS_ext16u_i64
) {
1762 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg
);
1764 tcg_gen_andi_i64(ret
, arg
, 0xffffu
);
1768 void tcg_gen_ext32u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1770 if (TCG_TARGET_REG_BITS
== 32) {
1771 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1772 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1773 } else if (TCG_TARGET_HAS_ext32u_i64
) {
1774 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg
);
1776 tcg_gen_andi_i64(ret
, arg
, 0xffffffffu
);
1781 * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
1783 * Byte pattern: xxxxxxxxab -> yyyyyyyyba
1785 * With TCG_BSWAP_IZ, x == zero, else undefined.
1786 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1788 void tcg_gen_bswap16_i64(TCGv_i64 ret
, TCGv_i64 arg
, int flags
)
1790 /* Only one extension flag may be present. */
1791 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
1793 if (TCG_TARGET_REG_BITS
== 32) {
1794 tcg_gen_bswap16_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), flags
);
1795 if (flags
& TCG_BSWAP_OS
) {
1796 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1798 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1800 } else if (TCG_TARGET_HAS_bswap16_i64
) {
1801 tcg_gen_op3i_i64(INDEX_op_bswap16_i64
, ret
, arg
, flags
);
1803 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1804 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1806 /* arg = ......ab or xxxxxxab */
1807 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .......a or .xxxxxxa */
1808 if (!(flags
& TCG_BSWAP_IZ
)) {
1809 tcg_gen_ext8u_i64(t0
, t0
); /* t0 = .......a */
1812 if (flags
& TCG_BSWAP_OS
) {
1813 tcg_gen_shli_i64(t1
, arg
, 56); /* t1 = b....... */
1814 tcg_gen_sari_i64(t1
, t1
, 48); /* t1 = ssssssb. */
1815 } else if (flags
& TCG_BSWAP_OZ
) {
1816 tcg_gen_ext8u_i64(t1
, arg
); /* t1 = .......b */
1817 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = ......b. */
1819 tcg_gen_shli_i64(t1
, arg
, 8); /* t1 = xxxxxab. */
1822 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ......ba (OZ) */
1824 /* xxxxxaba (no flag) */
1825 tcg_temp_free_i64(t0
);
1826 tcg_temp_free_i64(t1
);
1831 * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
1833 * Byte pattern: xxxxabcd -> yyyydcba
1835 * With TCG_BSWAP_IZ, x == zero, else undefined.
1836 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1838 void tcg_gen_bswap32_i64(TCGv_i64 ret
, TCGv_i64 arg
, int flags
)
1840 /* Only one extension flag may be present. */
1841 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
1843 if (TCG_TARGET_REG_BITS
== 32) {
1844 tcg_gen_bswap32_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1845 if (flags
& TCG_BSWAP_OS
) {
1846 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1848 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1850 } else if (TCG_TARGET_HAS_bswap32_i64
) {
1851 tcg_gen_op3i_i64(INDEX_op_bswap32_i64
, ret
, arg
, flags
);
1853 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1854 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1855 TCGv_i64 t2
= tcg_constant_i64(0x00ff00ff);
1857 /* arg = xxxxabcd */
1858 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .xxxxabc */
1859 tcg_gen_and_i64(t1
, arg
, t2
); /* t1 = .....b.d */
1860 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = .....a.c */
1861 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = ....b.d. */
1862 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ....badc */
1864 tcg_gen_shli_i64(t1
, ret
, 48); /* t1 = dc...... */
1865 tcg_gen_shri_i64(t0
, ret
, 16); /* t0 = ......ba */
1866 if (flags
& TCG_BSWAP_OS
) {
1867 tcg_gen_sari_i64(t1
, t1
, 32); /* t1 = ssssdc.. */
1869 tcg_gen_shri_i64(t1
, t1
, 32); /* t1 = ....dc.. */
1871 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ssssdcba (OS) */
1872 /* ....dcba (else) */
1874 tcg_temp_free_i64(t0
);
1875 tcg_temp_free_i64(t1
);
1880 * bswap64_i64: 64-bit byte swap on a 64-bit value.
1882 * Byte pattern: abcdefgh -> hgfedcba
1884 void tcg_gen_bswap64_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1886 if (TCG_TARGET_REG_BITS
== 32) {
1888 t0
= tcg_temp_ebb_new_i32();
1889 t1
= tcg_temp_ebb_new_i32();
1891 tcg_gen_bswap32_i32(t0
, TCGV_LOW(arg
));
1892 tcg_gen_bswap32_i32(t1
, TCGV_HIGH(arg
));
1893 tcg_gen_mov_i32(TCGV_LOW(ret
), t1
);
1894 tcg_gen_mov_i32(TCGV_HIGH(ret
), t0
);
1895 tcg_temp_free_i32(t0
);
1896 tcg_temp_free_i32(t1
);
1897 } else if (TCG_TARGET_HAS_bswap64_i64
) {
1898 tcg_gen_op3i_i64(INDEX_op_bswap64_i64
, ret
, arg
, 0);
1900 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1901 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1902 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
1904 /* arg = abcdefgh */
1905 tcg_gen_movi_i64(t2
, 0x00ff00ff00ff00ffull
);
1906 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .abcdefg */
1907 tcg_gen_and_i64(t1
, arg
, t2
); /* t1 = .b.d.f.h */
1908 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = .a.c.e.g */
1909 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = b.d.f.h. */
1910 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = badcfehg */
1912 tcg_gen_movi_i64(t2
, 0x0000ffff0000ffffull
);
1913 tcg_gen_shri_i64(t0
, ret
, 16); /* t0 = ..badcfe */
1914 tcg_gen_and_i64(t1
, ret
, t2
); /* t1 = ..dc..hg */
1915 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = ..ba..fe */
1916 tcg_gen_shli_i64(t1
, t1
, 16); /* t1 = dc..hg.. */
1917 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = dcbahgfe */
1919 tcg_gen_shri_i64(t0
, ret
, 32); /* t0 = ....dcba */
1920 tcg_gen_shli_i64(t1
, ret
, 32); /* t1 = hgfe.... */
1921 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = hgfedcba */
1923 tcg_temp_free_i64(t0
);
1924 tcg_temp_free_i64(t1
);
1925 tcg_temp_free_i64(t2
);
1930 * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
1931 * See also include/qemu/bitops.h, hswap64.
1933 * Byte pattern: abcdefgh -> ghefcdab
1935 void tcg_gen_hswap_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1937 uint64_t m
= 0x0000ffff0000ffffull
;
1938 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1939 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1941 /* arg = abcdefgh */
1942 tcg_gen_rotli_i64(t1
, arg
, 32); /* t1 = efghabcd */
1943 tcg_gen_andi_i64(t0
, t1
, m
); /* t0 = ..gh..cd */
1944 tcg_gen_shli_i64(t0
, t0
, 16); /* t0 = gh..cd.. */
1945 tcg_gen_shri_i64(t1
, t1
, 16); /* t1 = ..efghab */
1946 tcg_gen_andi_i64(t1
, t1
, m
); /* t1 = ..ef..ab */
1947 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ghefcdab */
1949 tcg_temp_free_i64(t0
);
1950 tcg_temp_free_i64(t1
);
1954 * wswap_i64: Swap 32-bit words within a 64-bit value.
1956 * Byte pattern: abcdefgh -> efghabcd
1958 void tcg_gen_wswap_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1960 /* Swapping 2 32-bit elements is a rotate. */
1961 tcg_gen_rotli_i64(ret
, arg
, 32);
1964 void tcg_gen_not_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1966 if (TCG_TARGET_REG_BITS
== 32) {
1967 tcg_gen_not_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1968 tcg_gen_not_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1969 } else if (TCG_TARGET_HAS_not_i64
) {
1970 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg
);
1972 tcg_gen_xori_i64(ret
, arg
, -1);
1976 void tcg_gen_andc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1978 if (TCG_TARGET_REG_BITS
== 32) {
1979 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1980 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1981 } else if (TCG_TARGET_HAS_andc_i64
) {
1982 tcg_gen_op3_i64(INDEX_op_andc_i64
, ret
, arg1
, arg2
);
1984 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1985 tcg_gen_not_i64(t0
, arg2
);
1986 tcg_gen_and_i64(ret
, arg1
, t0
);
1987 tcg_temp_free_i64(t0
);
1991 void tcg_gen_eqv_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1993 if (TCG_TARGET_REG_BITS
== 32) {
1994 tcg_gen_eqv_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1995 tcg_gen_eqv_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1996 } else if (TCG_TARGET_HAS_eqv_i64
) {
1997 tcg_gen_op3_i64(INDEX_op_eqv_i64
, ret
, arg1
, arg2
);
1999 tcg_gen_xor_i64(ret
, arg1
, arg2
);
2000 tcg_gen_not_i64(ret
, ret
);
2004 void tcg_gen_nand_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2006 if (TCG_TARGET_REG_BITS
== 32) {
2007 tcg_gen_nand_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
2008 tcg_gen_nand_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
2009 } else if (TCG_TARGET_HAS_nand_i64
) {
2010 tcg_gen_op3_i64(INDEX_op_nand_i64
, ret
, arg1
, arg2
);
2012 tcg_gen_and_i64(ret
, arg1
, arg2
);
2013 tcg_gen_not_i64(ret
, ret
);
2017 void tcg_gen_nor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2019 if (TCG_TARGET_REG_BITS
== 32) {
2020 tcg_gen_nor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
2021 tcg_gen_nor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
2022 } else if (TCG_TARGET_HAS_nor_i64
) {
2023 tcg_gen_op3_i64(INDEX_op_nor_i64
, ret
, arg1
, arg2
);
2025 tcg_gen_or_i64(ret
, arg1
, arg2
);
2026 tcg_gen_not_i64(ret
, ret
);
2030 void tcg_gen_orc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2032 if (TCG_TARGET_REG_BITS
== 32) {
2033 tcg_gen_orc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
2034 tcg_gen_orc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
2035 } else if (TCG_TARGET_HAS_orc_i64
) {
2036 tcg_gen_op3_i64(INDEX_op_orc_i64
, ret
, arg1
, arg2
);
2038 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2039 tcg_gen_not_i64(t0
, arg2
);
2040 tcg_gen_or_i64(ret
, arg1
, t0
);
2041 tcg_temp_free_i64(t0
);
2045 void tcg_gen_clz_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2047 if (TCG_TARGET_HAS_clz_i64
) {
2048 tcg_gen_op3_i64(INDEX_op_clz_i64
, ret
, arg1
, arg2
);
2050 gen_helper_clz_i64(ret
, arg1
, arg2
);
void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_ctz_i64) {
        tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 z, t = tcg_temp_ebb_new_i64();

        if (TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i64(t, arg1, 1);
            tcg_gen_andc_i64(t, t, arg1);
            tcg_gen_ctpop_i64(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
            tcg_gen_neg_i64(t, arg1);
            tcg_gen_and_i64(t, t, arg1);
            tcg_gen_clzi_i64(t, t, 64);
            tcg_gen_xori_i64(t, t, 63);
        }
        z = tcg_constant_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i64(t);
        tcg_temp_free_i64(z);
    } else {
        gen_helper_ctz_i64(ret, arg1, arg2);
    }
}
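/*
 * Notes on the ctz expansions above: for x != 0, (x - 1) & ~x sets
 * exactly the ctz(x) low bits, so a population count of that value is
 * the trailing-zero count (e.g. x = 0b10100 -> 0b00011 -> 2).  The clz
 * variant isolates the lowest set bit with x & -x; clz of a single set
 * bit is 63 - ctz, which the xor with 63 undoes.  The movcond then
 * substitutes arg2 for the x == 0 case, matching the ctz opcode.
 */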
void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t, arg, 63);
        tcg_gen_xor_i64(t, t, arg);
        tcg_gen_clzi_i64(t, t, 64);
        tcg_gen_subi_i64(ret, t, 1);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}
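/*
 * The clrsb expansion above computes clz(arg ^ (arg >> 63)) - 1: the
 * xor with the sign mask turns leading copies of the sign bit into
 * leading zeros, so clz counts the sign bit plus its redundant copies,
 * and subtracting one excludes the sign bit itself.
 */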
void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
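/*
 * The generic deposit fallback above computes
 *   ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs)
 * with mask = (1 << len) - 1, i.e. it clears the destination field in
 * arg1 and ORs in the low LEN bits of arg2 shifted into place.  The
 * extract2-based paths instead express the deposit as a funnel shift
 * when the field starts at bit 0 or ends at bit 63.
 */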
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live. */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size. */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 64) {
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words. */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts. */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u. */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
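/*
 * The unsigned extract fallbacks above reduce to either
 *   (arg >> ofs) & ((1 << len) - 1)
 * when a small AND immediate (or ext8u/ext16u/ext32u) is expected to be
 * cheap, or to the two-shift form
 *   (arg << (64 - len - ofs)) >> (64 - len)
 * which needs no mask at all.
 */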
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported. */
    if (ofs + len == 64) {
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words. */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally. */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
            /* Shift the field down for the low part. */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part. */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB. */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending. */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits. */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
/*
 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}
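/*
 * The deposit-based fallback above builds the same result as the
 * extract2 opcode:  ret = (al >> ofs) | (ah << (64 - ofs))  for
 * 0 < ofs < 64, i.e. a right funnel shift of the 128-bit pair ah:al.
 */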
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_negsetcond_i64(cond, t0, c1, c2);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
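/*
 * Without a host movcond, the selection above is done bitwise:
 * t0 = -(c1 cond c2) is all-ones when the condition holds and zero
 * otherwise, so (v1 & t0) | (v2 & ~t0) picks v1 or v2 without a branch.
 * The 32-bit host path computes the condition once with setcond2 and
 * applies the same idea to each half of the 64-bit result.
 */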
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
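/*
 * In the add2 fallback above, the carry out of the low half is
 * recovered as the unsigned comparison (al + bl) < al and added into
 * the high half.  The low result is built in a temporary first so that
 * rl may alias any of the inputs.
 */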
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
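/*
 * The mulu2-based path above derives the signed high part from the
 * unsigned one: interpreting a 64-bit value as signed subtracts
 * 2^64 * sign(x), so  high_s = high_u - (arg1 < 0 ? arg2 : 0)
 *                                     - (arg2 < 0 ? arg1 : 0).
 * The sari/and pairs build exactly those two conditional subtrahends.
 */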
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();
    TCGv_i64 t2 = tcg_temp_ebb_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1. */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
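/*
 * mulsu2 is the same adjustment with only arg1 treated as signed:
 * the high part becomes  high_u - (arg1 < 0 ? arg2 : 0).
 */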
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
{
    TCGv_i64 t = tcg_temp_ebb_new_i64();

    tcg_gen_sari_i64(t, a, 63);
    tcg_gen_xor_i64(ret, a, t);
    tcg_gen_sub_i64(ret, ret, t);
    tcg_temp_free_i64(t);
}
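/*
 * Branchless absolute value: t = a >> 63 (arithmetic) is 0 or -1, and
 * (a ^ t) - t leaves a non-negative value unchanged while computing
 * the two's complement negation of a negative one.
 */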
/* Size changing operations. */

void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}

void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information. */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above. */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
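/*
 * On a 64-bit host the concatenation above is simply
 *   dest = (uint64_t)low | ((uint64_t)high << 32),
 * expressed either as a deposit into bits [32,64) or as shift + or.
 */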
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}

void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}

void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
{
    if (dst != src) {
        tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
        tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
    }
}
/* QEMU specific operations. */

void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb. */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label. */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits. */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before. */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    gen_helper_lookup_tb_ptr(ptr, tcg_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}