/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "qemu/osdep.h"
#include "tcg/tcg-temp-internal.h"
#include "tcg/tcg-op-common.h"
#include "exec/translation-block.h"
#include "exec/plugin-gen.h"
#include "tcg-internal.h"
/*
 * Encourage the compiler to tail-call to a function, rather than inlining.
 * Minimizes code size across 99 bottles of beer on the wall.
 */
#define NI  __attribute__((noinline))
void NI tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc, 1);
    op->args[0] = a1;
}

void NI tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc, 2);
    op->args[0] = a1;
    op->args[1] = a2;
}

void NI tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc, 3);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void NI tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc, 4);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void NI tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                    TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc, 5);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void NI tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                    TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc, 6);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}
/*
 * With CONFIG_DEBUG_TCG, tcgv_*_tmp via tcgv_*_arg, is an out-of-line
 * assertion check.  Force tail calls to avoid too much code expansion.
 */
#ifdef CONFIG_DEBUG_TCG
#define DNI NI
#else
#define DNI
#endif

static void DNI tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 a1)
{
    tcg_gen_op1(opc, tcgv_i32_arg(a1));
}

static void DNI tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 a1)
{
    tcg_gen_op1(opc, tcgv_i64_arg(a1));
}

static void DNI tcg_gen_op1i(TCGOpcode opc, TCGArg a1)
{
    tcg_gen_op1(opc, a1);
}

static void DNI tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2)
{
    tcg_gen_op2(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2));
}

static void DNI tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2)
{
    tcg_gen_op2(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2));
}

static void DNI tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 a1,
                                TCGv_i32 a2, TCGv_i32 a3)
{
    tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), tcgv_i32_arg(a3));
}

static void DNI tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 a1,
                                TCGv_i64 a2, TCGv_i64 a3)
{
    tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), tcgv_i64_arg(a3));
}

static void DNI tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 a1,
                                 TCGv_i32 a2, TCGArg a3)
{
    tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3);
}

static void DNI tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 a1,
                                 TCGv_i64 a2, TCGArg a3)
{
    tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3);
}

static void DNI tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
                                    TCGv_ptr base, TCGArg offset)
{
    tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_ptr_arg(base), offset);
}

static void DNI tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
                                    TCGv_ptr base, TCGArg offset)
{
    tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_ptr_arg(base), offset);
}

static void DNI tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                TCGv_i32 a3, TCGv_i32 a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4));
}

static void DNI tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                TCGv_i64 a3, TCGv_i64 a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4));
}

static void DNI tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                 TCGv_i32 a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), a4);
}

static void DNI tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                 TCGv_i64 a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), a4);
}

static void DNI tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                  TCGArg a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3, a4);
}

static void DNI tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                  TCGArg a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3, a4);
}

static void DNI tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                TCGv_i32 a3, TCGv_i32 a4, TCGv_i32 a5)
{
    tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5));
}

static void DNI tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                TCGv_i64 a3, TCGv_i64 a4, TCGv_i64 a5)
{
    tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5));
}

static void DNI tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                  TCGv_i32 a3, TCGArg a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), a4, a5);
}

static void DNI tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                  TCGv_i64 a3, TCGArg a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), a4, a5);
}

static void DNI tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                TCGv_i32 a3, TCGv_i32 a4,
                                TCGv_i32 a5, TCGv_i32 a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5),
                tcgv_i32_arg(a6));
}

static void DNI tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                TCGv_i64 a3, TCGv_i64 a4,
                                TCGv_i64 a5, TCGv_i64 a6)
{
    tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5),
                tcgv_i64_arg(a6));
}

static void DNI tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                 TCGv_i32 a3, TCGv_i32 a4,
                                 TCGv_i32 a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5), a6);
}

static void DNI tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                                 TCGv_i64 a3, TCGv_i64 a4,
                                 TCGv_i64 a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5), a6);
}

static void DNI tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                                  TCGv_i32 a3, TCGv_i32 a4,
                                  TCGArg a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), a5, a6);
}
void gen_set_label(TCGLabel *l)
{
    l->present = 1;
    tcg_gen_op1(INDEX_op_set_label, label_arg(l));
}

static void add_last_as_label_use(TCGLabel *l)
{
    TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));

    u->op = tcg_last_op();
    QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
}

void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}
void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
void tcg_gen_plugin_cb_start(unsigned from, unsigned type, unsigned wr)
{
    tcg_gen_op3(INDEX_op_plugin_cb_start, from, type, wr);
}

void tcg_gen_plugin_cb_end(void)
{
    tcg_emit_op(INDEX_op_plugin_cb_end, 0);
}
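
/* 32 bit ops */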
void tcg_gen_discard_i32(TCGv_i32 arg)
{
    tcg_gen_op1_i32(INDEX_op_discard, arg);
}

void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (ret != arg) {
        tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
    }
}

void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}

void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0) {
        tcg_gen_neg_i32(ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_addi_i32(ret, arg1, -arg2);
}

void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
}
void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}
void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_not_i32) {
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
    } else {
        tcg_gen_xori_i32(ret, arg, -1);
    }
}
void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
        add_last_as_label_use(l);
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}
void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
                            TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i32) {
        tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
    } else {
        tcg_gen_setcond_i32(cond, ret, arg1, arg2);
        tcg_gen_neg_i32(ret, ret);
    }
}

void tcg_gen_negsetcondi_i32(TCGCond cond, TCGv_i32 ret,
                             TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_negsetcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
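
/*
 * Note for the fallbacks below: without a host rem op, the remainder
 * is computed as arg1 - (arg1 / arg2) * arg2; the div2 opcode itself
 * produces quotient and remainder in its first two outputs, so the
 * rem expansion simply takes the second output.
 */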
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}
void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}
void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}
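
/*
 * The expansions below rely on ctz(x) == ctpop((x - 1) & ~x) and, for
 * hosts with only clz, on ctz(x) == 31 - clz(x & -x) for x != 0; a
 * final movcond substitutes arg2 for the x == 0 case.
 */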
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
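
/*
 * clrsb counts the leading redundant sign bits.  XOR-ing the value
 * with its own sign mask turns copies of the sign bit into leading
 * zeros, so the result is clz(arg ^ (arg >> 31)) - 1.
 */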
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
/*
 * Extract 32 bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    }
}
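
/*
 * Without a host add2/sub2, the double-word arithmetic below is done
 * by packing the 32-bit halves into a 64-bit temporary, using the
 * 64-bit op, and splitting the result back into halves.
 */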
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        qemu_build_not_reached();
    }
}
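
/*
 * For the signed multiply below on a 32-bit host: with sign masks
 * sa = arg1 >> 31 and sb = arg2 >> 31, the signed product equals the
 * unsigned product minus 2^32 * ((sa & arg2) + (sb & arg1)), so only
 * the high part needs adjusting.
 */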
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
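
/*
 * mulsu2 multiplies signed arg1 by unsigned arg2.  Relative to the
 * unsigned product only a negative arg1 needs a correction, namely
 * subtracting arg2 from the high part.
 */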
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
/*
 * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
 *
 * Byte pattern: xxab -> yyba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

                                            /* arg = ..ab (IZ) xxab (!IZ) */
        tcg_gen_shri_i32(t0, arg, 8);       /*  t0 = ...a (IZ) .xxa (!IZ) */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);      /*  t0 = ...a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);  /*  t1 = b... */
            tcg_gen_sari_i32(t1, t1, 16);   /*  t1 = ssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);     /*  t1 = ...b */
            tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = ..b. */
        } else {
            tcg_gen_shli_i32(t1, arg, 8);   /*  t1 = xab. */
        }

        tcg_gen_or_i32(ret, t0, t1);        /* ret = ..ba (OZ) */
                                            /*     = ssba (OS) */
                                            /*     = xaba (no flag) */
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
/*
 * bswap32_i32: 32-bit byte swap on a 32-bit value.
 *
 * Byte pattern: abcd -> dcba
 */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
/*
 * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
 *
 * Byte pattern: abcd -> cdab
 */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
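
/* abs(a) == (a ^ s) - s, where s == a >> 31 is the replicated sign bit. */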
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}

void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}

void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}

void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}

void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}

void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}

void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}

void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}
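
/* 64 bit ops */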
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op1_i64(INDEX_op_discard, arg);
    } else {
        tcg_gen_discard_i32(TCGV_LOW(arg));
        tcg_gen_discard_i32(TCGV_HIGH(arg));
    }
}
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (ret == arg) {
        return;
    }
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
    } else {
        TCGTemp *ts = tcgv_i64_temp(arg);

        /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
        if (ts->kind == TEMP_CONST) {
            tcg_gen_movi_i64(ret, ts->val);
        } else {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
        }
    }
}
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
    } else {
        tcg_gen_movi_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
    }
}
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /*
     * For 32-bit host, since arg2 and ret have different types,
     * they cannot be the same temporary -- no chance of overlap.
     */
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
    }
}
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
    } else {
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
    }
}
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
    } else {
        gen_helper_shl_i64(ret, arg1, arg2);
    }
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
    } else {
        gen_helper_shr_i64(ret, arg1, arg2);
    }
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
    } else {
        gen_helper_sar_i64(ret, arg1, arg2);
    }
}
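
/*
 * On a 32-bit host the 64-bit product below is formed from mulu2 on
 * the low halves plus the two cross products added into the high
 * half; the high*high term only affects bits 64 and up.
 */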
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
        return;
    }

    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0) {
        tcg_gen_neg_i64(ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_addi_i64(ret, arg1, -arg2);
}
void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
    } else {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         zero, zero, TCGV_LOW(arg), TCGV_HIGH(arg));
    }
}
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
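
/*
 * Constant 64-bit shift on a 32-bit host, operating on the two 32-bit
 * halves.  For c >= 32 the result is a shift of a single half; for
 * smaller counts, bits are funneled between the halves via extract2
 * or deposit.
 */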
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
        add_last_as_label_use(l);
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
        add_last_as_label_use(l);
    }
}
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_negsetcondi_i64(TCGCond cond, TCGv_i64 ret,
                             TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_negsetcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
}
void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
                            TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i64) {
        tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
    } else if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
        tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
    } else {
        tcg_gen_setcond_i64(cond, ret, arg1, arg2);
        tcg_gen_neg_i64(ret, ret);
    }
}
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

/*
 * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
 *
 * Byte pattern: xxxxxxxxab -> yyyyyyyyba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

        /* arg = ......ab or xxxxxxab */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .......a or .xxxxxxa */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i64(t0, t0);      /*  t0 = .......a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i64(t1, arg, 56);  /*  t1 = b....... */
            tcg_gen_sari_i64(t1, t1, 48);   /*  t1 = ssssssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);     /*  t1 = .......b */
            tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = ......b. */
        } else {
            tcg_gen_shli_i64(t1, arg, 8);   /*  t1 = xxxxxab. */
        }

        tcg_gen_or_i64(ret, t0, t1);        /* ret = ......ba (OZ) */
                                            /*       ssssssba (OS) */
                                            /*       xxxxxaba (no flag) */
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

/*
 * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
 *
 * Byte pattern: xxxxabcd -> yyyydcba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba (OS) */
                                            /*       ....dcba (else) */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

/*
 * bswap64_i64: 64-bit byte swap on a 64-bit value.
 *
 * Byte pattern: abcdefgh -> hgfedcba
 */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}

/*
 * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
 * See also include/qemu/bitops.h, hswap64.
 *
 * Byte pattern: abcdefgh -> ghefcdab
 */
void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    uint64_t m = 0x0000ffff0000ffffull;
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
    tcg_gen_rotli_i64(t1, arg, 32);     /*  t1 = efghabcd */
    tcg_gen_andi_i64(t0, t1, m);        /*  t0 = ..gh..cd */
    tcg_gen_shli_i64(t0, t0, 16);       /*  t0 = gh..cd.. */
    tcg_gen_shri_i64(t1, t1, 16);       /*  t1 = ..efghab */
    tcg_gen_andi_i64(t1, t1, m);        /*  t1 = ..ef..ab */
    tcg_gen_or_i64(ret, t0, t1);        /* ret = ghefcdab */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/*
 * wswap_i64: Swap 32-bit words within a 64-bit value.
 *
 * Byte pattern: abcdefgh -> efghabcd
 */
void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Swapping 2 32-bit elements is a rotate. */
    tcg_gen_rotli_i64(ret, arg, 32);
}

void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_clz_i64) {
        tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
    } else {
        gen_helper_clz_i64(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_ctz_i64) {
        tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 z, t = tcg_temp_ebb_new_i64();

        if (TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i64(t, arg1, 1);
            tcg_gen_andc_i64(t, t, arg1);
            tcg_gen_ctpop_i64(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
            tcg_gen_neg_i64(t, arg1);
            tcg_gen_and_i64(t, t, arg1);
            tcg_gen_clzi_i64(t, t, 64);
            tcg_gen_xori_i64(t, t, 63);
        }
        z = tcg_constant_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i64(t);
        tcg_temp_free_i64(z);
    } else {
        gen_helper_ctz_i64(ret, arg1, arg2);
    }
}
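
/*
 * Two standard identities back the expansions above:
 * ctpop((x - 1) & ~x) counts exactly the bits below the least
 * significant set bit of x, which is ctz(x); and for x != 0,
 * clz(x & -x) == 63 - ctz(x), so the xori with 63 converts one into
 * the other.  The final movcond substitutes arg2 for the x == 0 case.
 */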

void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t, arg, 63);
        tcg_gen_xor_i64(t, t, arg);
        tcg_gen_clzi_i64(t, t, 64);
        tcg_gen_subi_i64(ret, t, 1);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}
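
/*
 * In the expansion above, t = arg ^ (arg >> 63) turns redundant sign
 * bits into leading zeros; clz of that, minus one for the sign bit
 * itself, yields the count of leading redundant sign bits.
 */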

void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}

void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}

void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
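
/*
 * Worked example for the generic masking path above, with ofs == 8
 * and len == 8: mask == 0xff, arg1 is cleared with ~(0xff << 8), and
 * the low byte of arg2 is shifted into bits 15:8 before the final or.
 */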

void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}

void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}

void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}

/*
 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}
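
/*
 * The fallback above computes (ah:al) >> ofs as
 * (al >> ofs) | (ah << (64 - ofs)): the deposit places the low ofs
 * bits of ah into the top ofs bits of the shifted al.
 */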

void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);

        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, zero,
                            TCGV_LOW(v1), TCGV_LOW(v2));
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, zero,
                            TCGV_HIGH(v1), TCGV_HIGH(v2));

        tcg_temp_free_i32(t0);
    }
}
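
/*
 * On 32-bit hosts, setcond2 above evaluates the full 64-bit
 * comparison from the two register halves, leaving a 0/1 flag in t0;
 * the pair of 32-bit movconds then selects each half of the result
 * on t0 != 0.
 */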

void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
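
/*
 * The fallback above derives the carry out of the low half from the
 * standard unsigned overflow test: (al + bl) < al (unsigned) exactly
 * when the addition wrapped, and that 0/1 value is added into the
 * high half.
 */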

void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
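
/*
 * Likewise for subtraction: al < bl (unsigned) is precisely the
 * borrow out of the low half, which the fallback above subtracts
 * from the high half.
 */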

void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
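
/*
 * The mulu2-based path above uses the two's complement identity
 * (s1 * s2)_hi == (u1 * u2)_hi - (s1 < 0 ? s2 : 0) - (s2 < 0 ? s1 : 0);
 * the sari/and pairs materialize the two conditional subtrahends
 * without branches.
 */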

void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();
    TCGv_i64 t2 = tcg_temp_ebb_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
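
/*
 * Same identity as muls2 above, restricted to one signed operand:
 * the high half of the unsigned product is corrected by subtracting
 * arg2 whenever arg1 is negative.
 */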

void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}

void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
{
    TCGv_i64 t = tcg_temp_ebb_new_i64();

    tcg_gen_sari_i64(t, a, 63);
    tcg_gen_xor_i64(ret, a, t);
    tcg_gen_sub_i64(ret, ret, t);
    tcg_temp_free_i64(t);
}
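
/*
 * Classic branchless abs: t is 0 or -1 according to the sign of a,
 * so (a ^ t) - t is a two's complement negation when t == -1 and an
 * identity when t == 0.
 */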

/* Size changing operations.  */

void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}

void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}

void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}

void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

void tcg_gen_concat32_i64(TCGv_i64 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_deposit_i64(ret, lo, hi, 32, 32);
}

void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}

void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}

void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
{
    if (dst != src) {
        tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
        tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
    }
}

void tcg_gen_ld_i128(TCGv_i128 ret, TCGv_ptr base, tcg_target_long offset)
{
    if (HOST_BIG_ENDIAN) {
        tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset);
        tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset + 8);
    } else {
        tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset);
        tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset + 8);
    }
}

void tcg_gen_st_i128(TCGv_i128 val, TCGv_ptr base, tcg_target_long offset)
{
    if (HOST_BIG_ENDIAN) {
        tcg_gen_st_i64(TCGV128_HIGH(val), base, offset);
        tcg_gen_st_i64(TCGV128_LOW(val), base, offset + 8);
    } else {
        tcg_gen_st_i64(TCGV128_LOW(val), base, offset);
        tcg_gen_st_i64(TCGV128_HIGH(val), base, offset + 8);
    }
}

/* QEMU specific operations.  */

void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
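
/*
 * TranslationBlock pointers are sufficiently aligned that the low
 * bits of val are free to carry the exit index; a NULL tb with
 * idx == 0 presumably encodes a plain exit with nothing to chain.
 */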

void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}

void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    gen_helper_lookup_tb_ptr(ptr, tcg_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}