/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "cpu.h"
#include "exec/exec-all.h"
#include "tcg.h"
#include "tcg-op.h"
#include "tcg-mo.h"
#include "trace-tcg.h"
#include "trace/mem.h"

/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif

/* Note that this is optimized for sequential allocation during translate.
   Up to and including filling in the forward link immediately.  We'll do
   proper termination of the end of the list after we finish translation.  */

static inline TCGOp *tcg_emit_op(TCGOpcode opc)
{
    TCGContext *ctx = tcg_ctx;
    int oi = ctx->gen_next_op_idx;
    int ni = oi + 1;
    int pi = oi - 1;
    TCGOp *op = &ctx->gen_op_buf[oi];

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_op_buf[0].prev = oi;
    ctx->gen_next_op_idx = ni;

    memset(op, 0, offsetof(TCGOp, args));
    op->opc = opc;
    op->prev = pi;
    op->next = ni;

    return op;
}

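/*
 * Ops emitted here live in ctx->gen_op_buf and are chained by index:
 * element 0 serves as the list head, and its 'prev' field is rewritten on
 * every emission so it always names the most recently emitted op, while the
 * new op's forward link is filled in speculatively as described above.
 */
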
void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
}

void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
}

void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}

void tcg_gen_mb(TCGBar mb_type)
{
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}

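/*
 * The barrier op only needs to reach the backend when this TB may run in
 * parallel with other vCPUs (CF_PARALLEL); with a single serialised vCPU
 * the guest's memory operations are already emitted in program order, so
 * the fence can be elided.
 */
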
void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg1);
        tcg_gen_sub_i32(ret, t0, arg2);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, l);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_setcond_i32(cond, ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_mul_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t = tcg_const_i32(arg2);
    tcg_gen_clz_i32(ret, arg1, t);
    tcg_temp_free_i32(t);
}

void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
        tcg_temp_free_i32(z);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}

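/*
 * The ctz fallbacks above rely on two standard bit tricks:
 * (arg - 1) & ~arg sets exactly the bits below the lowest set bit, so its
 * population count equals ctz(arg); and arg & -arg isolates the lowest set
 * bit, so 31 - clz of that value (computed as clz ^ 31) also equals
 * ctz(arg).  The final movcond supplies arg2 for the arg == 0 case.
 */
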
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i32 t = tcg_const_i32(arg2);
        tcg_gen_ctz_i32(ret, arg1, t);
        tcg_temp_free_i32(t);
    }
}

void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}

void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}

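/*
 * Generic deposit when the host has no deposit instruction: the field is
 * cleared out of arg1 with ~(mask << ofs), the new field is masked (when
 * needed) and shifted into position, and the two halves are ORed together.
 */
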
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_const_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
        tcg_temp_free_i32(zero);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}

void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}

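/*
 * The default extract expansion uses two shifts: shifting left by
 * 32 - len - ofs places the top of the field at bit 31, and the logical
 * right shift by 32 - len then returns it to bit 0 with zero-extension.
 */
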
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}

void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

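/*
 * Without a native movcond, the condition is materialised with setcond and
 * negated to form an all-ones/all-zeroes mask; v1 & mask and v2 & ~mask are
 * then ORed so that exactly one of the two values survives.
 */
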
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

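/*
 * The 32-bit-host fallback above derives the signed double-word product
 * from the unsigned one: a signed operand equals its unsigned bit pattern
 * minus 2^32 when negative, so the high word of the unsigned product is
 * corrected by subtracting the other operand once for each negative input.
 */
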
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg1);
        tcg_gen_sub_i64(ret, t0, arg2);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

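/*
 * Double-word shift by a constant on a 32-bit host: c == 0 is a plain move,
 * c >= 32 moves one word into the other (filling with zeroes or sign bits),
 * and smaller counts combine the bits shifted out of one word with the
 * shifted value of the other via shift/or.
 */
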
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, l);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

/* Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 56);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

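/*
 * The generic bswap64 expansion above isolates each byte with an AND mask
 * (or a shift, for the outermost bytes), moves it to its mirrored position,
 * and ORs the pieces into the accumulator t0.
 */
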
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_clz_i64) {
        tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
    } else {
        gen_helper_clz_i64(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_const_i32((uint32_t)arg2 - 32);
        tcg_gen_clz_i32(t, TCGV_LOW(arg1), t);
        tcg_gen_addi_i32(t, t, 32);
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t = tcg_const_i64(arg2);
        tcg_gen_clz_i64(ret, arg1, t);
        tcg_temp_free_i64(t);
    }
}

void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_ctz_i64) {
        tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 z, t = tcg_temp_new_i64();

        if (TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i64(t, arg1, 1);
            tcg_gen_andc_i64(t, t, arg1);
            tcg_gen_ctpop_i64(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
            tcg_gen_neg_i64(t, arg1);
            tcg_gen_and_i64(t, t, arg1);
            tcg_gen_clzi_i64(t, t, 64);
            tcg_gen_xori_i64(t, t, 63);
        }
        z = tcg_const_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i64(t);
        tcg_temp_free_i64(z);
    } else {
        gen_helper_ctz_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_const_i32((uint32_t)arg2 - 32);
        tcg_gen_ctz_i32(t32, TCGV_HIGH(arg1), t32);
        tcg_gen_addi_i32(t32, t32, 32);
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t64 = tcg_const_i64(arg2);
        tcg_gen_ctz_i64(ret, arg1, t64);
        tcg_temp_free_i64(t64);
    }
}

void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_sari_i64(t, arg, 63);
        tcg_gen_xor_i64(t, t, arg);
        tcg_gen_clzi_i64(t, t, 64);
        tcg_gen_subi_i64(ret, t, 1);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}

void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}

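/*
 * On 32-bit hosts the population count of a 64-bit value is simply the sum
 * of the counts of its two halves, each at most 32, so the sum cannot
 * overflow the low word.
 */
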
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}

void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}

void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_const_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
        tcg_temp_free_i64(zero);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}


void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
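
/*
 * For example, a front end that needs bits [20, 32) of a guest register as
 * an unsigned value might emit (dst and src being its own temporaries)
 *
 *     tcg_gen_extract_i64(dst, src, 20, 12);
 *
 * which becomes a single extract op where the backend supports one, and
 * one of the shift/mask or shift/zero-extend sequences above otherwise.
 */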

void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_new_i32();
            /* Extract the bits for the high word normally.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}

void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
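
/*
 * The branch-free fallback above relies on setcond producing 0 or 1:
 * negating that result yields an all-zeros or all-ones mask M, and
 *
 *     ret = (v1 & M) | (v2 & ~M)
 *
 * selects v1 when the condition held and v2 otherwise.
 */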

void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
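
/*
 * In the generic paths above the carry/borrow is recovered with an
 * unsigned comparison rather than a flags register: for the addition,
 * t0 = al + bl wrapped around exactly when t0 < al (unsigned), e.g.
 * al = 0xffffffffffffffff, bl = 1 gives t0 = 0 < al, so 1 is added into
 * the high half; for the subtraction, a borrow occurred exactly when
 * al < bl (unsigned).
 */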

void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
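
/*
 * The adjustment above follows from interpreting the same bit pattern two
 * ways: when arg1 is negative, its signed value equals its unsigned value
 * minus 2^64, so
 *
 *     signed(arg1) * arg2 = unsigned(arg1) * arg2 - (arg2 << 64)
 *
 * i.e. the 128-bit unsigned product only needs arg2 subtracted from its
 * high half.  The sari/and pair computes "arg2 if arg1 < 0, else 0"
 * without a branch, and that value is subtracted from t1.
 */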

/* Size changing operations.  */

void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}

void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}

void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
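
/*
 * For example, a front end reassembling a 64-bit guest value from two
 * 32-bit halves held in its own temporaries "lo" and "hi" might emit
 *
 *     tcg_gen_concat_i32_i64(val, lo, hi);
 *
 * so that val = ((uint64_t)hi << 32) | lo, using either a deposit op or
 * the shift/or fallback above.
 */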

void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

/* QEMU specific operations.  */

void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}

void tcg_gen_lookup_and_goto_ptr(void)
{
    if (TCG_TARGET_HAS_goto_ptr && !qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
        TCGv_ptr ptr = tcg_temp_new_ptr();
        gen_helper_lookup_tb_ptr(ptr, cpu_env);
        tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
        tcg_temp_free_ptr(ptr);
    } else {
        tcg_gen_exit_tb(0);
    }
}
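
/*
 * As a sketch of how a target's translator typically uses these (details
 * vary by front end): when the destination of a direct branch is known at
 * translation time, the end of the TB is emitted roughly as
 *
 *     tcg_gen_goto_tb(n);                  -- n is 0 or 1
 *     ... update the guest PC ...
 *     tcg_gen_exit_tb((uintptr_t)tb + n);  -- allow later direct chaining
 *
 * while an indirect branch updates the PC and then calls
 * tcg_gen_lookup_and_goto_ptr() to try to continue execution without
 * returning to the main loop.
 */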

static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
{
    /* Trigger the asserts within as early as possible.  */
    (void)get_alignment_bits(op);

    switch (op & MO_SIZE) {
    case MO_8:
        op &= ~MO_BSWAP;
        break;
    case MO_16:
        break;
    case MO_32:
        if (!is64) {
            op &= ~MO_SIGN;
        }
        break;
    case MO_64:
        if (!is64) {
            tcg_abort();
        }
        break;
    }
    if (st) {
        op &= ~MO_SIGN;
    }
    return op;
}

static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_i64_arg(addr), oi);
    }
#endif
}

static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_i32_arg(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}

static void tcg_gen_req_mo(TCGBar type)
{
#ifdef TCG_GUEST_DEFAULT_MO
    type &= TCG_GUEST_DEFAULT_MO;
#endif
    type &= ~TCG_TARGET_DEFAULT_MO;
    if (type) {
        tcg_gen_mb(type | TCG_BAR_SC);
    }
}
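
/*
 * For instance, when the guest's memory model (TCG_GUEST_DEFAULT_MO)
 * demands orderings that the host (TCG_TARGET_DEFAULT_MO) does not
 * already provide, the calls below such as
 * tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD) emit a TCG_BAR_SC barrier
 * before the access; when the host is at least as strongly ordered as
 * the guest they emit nothing.
 */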

void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
    memop = tcg_canonicalize_memop(memop, 0, 0);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
}

void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
    memop = tcg_canonicalize_memop(memop, 0, 1);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
}

void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 0);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
}

void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 1);
    trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
}

static void tcg_gen_ext_i32(TCGv_i32 ret, TCGv_i32 val, TCGMemOp opc)
{
    switch (opc & MO_SSIZE) {
    case MO_SB:
        tcg_gen_ext8s_i32(ret, val);
        break;
    case MO_UB:
        tcg_gen_ext8u_i32(ret, val);
        break;
    case MO_SW:
        tcg_gen_ext16s_i32(ret, val);
        break;
    case MO_UW:
        tcg_gen_ext16u_i32(ret, val);
        break;
    default:
        tcg_gen_mov_i32(ret, val);
        break;
    }
}

static void tcg_gen_ext_i64(TCGv_i64 ret, TCGv_i64 val, TCGMemOp opc)
{
    switch (opc & MO_SSIZE) {
    case MO_SB:
        tcg_gen_ext8s_i64(ret, val);
        break;
    case MO_UB:
        tcg_gen_ext8u_i64(ret, val);
        break;
    case MO_SW:
        tcg_gen_ext16s_i64(ret, val);
        break;
    case MO_UW:
        tcg_gen_ext16u_i64(ret, val);
        break;
    case MO_SL:
        tcg_gen_ext32s_i64(ret, val);
        break;
    case MO_UL:
        tcg_gen_ext32u_i64(ret, val);
        break;
    default:
        tcg_gen_mov_i64(ret, val);
        break;
    }
}

#ifdef CONFIG_SOFTMMU
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i64, TCGv_i32);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i32);
#else
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64, TCGv_i64);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64);
#endif

#ifdef CONFIG_ATOMIC64
# define WITH_ATOMIC64(X) X,
#else
# define WITH_ATOMIC64(X)
#endif

static void * const table_cmpxchg[16] = {
    [MO_8] = gen_helper_atomic_cmpxchgb,
    [MO_16 | MO_LE] = gen_helper_atomic_cmpxchgw_le,
    [MO_16 | MO_BE] = gen_helper_atomic_cmpxchgw_be,
    [MO_32 | MO_LE] = gen_helper_atomic_cmpxchgl_le,
    [MO_32 | MO_BE] = gen_helper_atomic_cmpxchgl_be,
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_cmpxchgq_le)
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_cmpxchgq_be)
};

void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
                                TCGv_i32 newv, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);

    if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();

        tcg_gen_ext_i32(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
        tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i32(t2, addr, idx, memop);
        tcg_temp_free_i32(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, t1, memop);
        } else {
            tcg_gen_mov_i32(retv, t1);
        }
        tcg_temp_free_i32(t1);
    } else {
        gen_atomic_cx_i32 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(retv, cpu_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, cpu_env, addr, cmpv, newv);
#endif

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, retv, memop);
        }
    }
}
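
/*
 * As an illustration, a front end implementing a 32-bit compare-and-swap
 * guest instruction might emit something like
 *
 *     tcg_gen_atomic_cmpxchg_i32(oldv, addr, cmpv, newv, mem_idx, MO_TEUL);
 *
 * (oldv, cmpv, newv and mem_idx being the front end's own temporaries and
 * mmu index).  Without CF_PARALLEL this expands to the plain load /
 * movcond / store sequence above; with parallel CPUs it calls the
 * out-of-line atomic helper selected from table_cmpxchg.
 */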

void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
                                TCGv_i64 newv, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();

        tcg_gen_ext_i64(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
        tcg_gen_movcond_i64(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i64(t2, addr, idx, memop);
        tcg_temp_free_i64(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, t1, memop);
        } else {
            tcg_gen_mov_i64(retv, t1);
        }
        tcg_temp_free_i64(t1);
    } else if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_cx_i64 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop, idx));
            gen(retv, cpu_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, cpu_env, addr, cmpv, newv);
#endif
#else
        gen_helper_exit_atomic(cpu_env);
        /* Produce a result, so that we have a well-formed opcode stream
           with respect to uses of the result in the (dead) code following.  */
        tcg_gen_movi_i64(retv, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 c32 = tcg_temp_new_i32();
        TCGv_i32 n32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(c32, cmpv);
        tcg_gen_extrl_i64_i32(n32, newv);
        tcg_gen_atomic_cmpxchg_i32(r32, addr, c32, n32, idx, memop & ~MO_SIGN);
        tcg_temp_free_i32(c32);
        tcg_temp_free_i32(n32);

        tcg_gen_extu_i32_i64(retv, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, retv, memop);
        }
    }
}

static void do_nonatomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                                TCGArg idx, TCGMemOp memop, bool new_val,
                                void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();

    memop = tcg_canonicalize_memop(memop, 0, 0);

    tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
    gen(t2, t1, val);
    tcg_gen_qemu_st_i32(t2, addr, idx, memop);

    tcg_gen_ext_i32(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                             TCGArg idx, TCGMemOp memop, void * const table[])
{
    gen_atomic_op_i32 gen;

    memop = tcg_canonicalize_memop(memop, 0, 0);

    gen = table[memop & (MO_SIZE | MO_BSWAP)];
    tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
    {
        TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
        gen(ret, cpu_env, addr, val, oi);
        tcg_temp_free_i32(oi);
    }
#else
    gen(ret, cpu_env, addr, val);
#endif

    if (memop & MO_SIGN) {
        tcg_gen_ext_i32(ret, ret, memop);
    }
}

static void do_nonatomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                                TCGArg idx, TCGMemOp memop, bool new_val,
                                void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();

    memop = tcg_canonicalize_memop(memop, 1, 0);

    tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
    gen(t2, t1, val);
    tcg_gen_qemu_st_i64(t2, addr, idx, memop);

    tcg_gen_ext_i64(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                             TCGArg idx, TCGMemOp memop, void * const table[])
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_op_i64 gen;

        gen = table[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(ret, cpu_env, addr, val, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(ret, cpu_env, addr, val);
#endif
#else
        gen_helper_exit_atomic(cpu_env);
        /* Produce a result, so that we have a well-formed opcode stream
           with respect to uses of the result in the (dead) code following.  */
        tcg_gen_movi_i64(ret, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 v32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(v32, val);
        do_atomic_op_i32(r32, addr, v32, idx, memop & ~MO_SIGN, table);
        tcg_temp_free_i32(v32);

        tcg_gen_extu_i32_i64(ret, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(ret, ret, memop);
        }
    }
}

#define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
static void * const table_##NAME[16] = {                                \
    [MO_8] = gen_helper_atomic_##NAME##b,                               \
    [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
    [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
    [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
    [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
};                                                                      \
void tcg_gen_atomic_##NAME##_i32                                        \
    (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i32);                        \
    }                                                                   \
}                                                                       \
void tcg_gen_atomic_##NAME##_i64                                        \
    (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i64);                        \
    }                                                                   \
}

GEN_ATOMIC_HELPER(fetch_add, add, 0)
GEN_ATOMIC_HELPER(fetch_and, and, 0)
GEN_ATOMIC_HELPER(fetch_or, or, 0)
GEN_ATOMIC_HELPER(fetch_xor, xor, 0)

GEN_ATOMIC_HELPER(add_fetch, add, 1)
GEN_ATOMIC_HELPER(and_fetch, and, 1)
GEN_ATOMIC_HELPER(or_fetch, or, 1)
GEN_ATOMIC_HELPER(xor_fetch, xor, 1)

static void tcg_gen_mov2_i32(TCGv_i32 r, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mov_i32(r, b);
}

static void tcg_gen_mov2_i64(TCGv_i64 r, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mov_i64(r, b);
}

GEN_ATOMIC_HELPER(xchg, mov2, 0)

#undef GEN_ATOMIC_HELPER
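
/*
 * Each GEN_ATOMIC_HELPER(NAME, OP, NEW) expansion above defines
 * tcg_gen_atomic_NAME_i32 and tcg_gen_atomic_NAME_i64.  As an example of
 * the generated interface, a front end might implement an atomic
 * add-to-memory that returns the old value with
 *
 *     tcg_gen_atomic_fetch_add_i32(oldv, addr, incr, mem_idx, MO_TEUL);
 *
 * (oldv, incr and mem_idx being its own temporaries and mmu index), which
 * becomes a helper call under CF_PARALLEL and a load/add/store sequence
 * otherwise.
 */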