/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "qemu/osdep.h"
#include "qemu-common.h"
#include "cpu.h"
#include "exec/exec-all.h"
#include "tcg.h"
#include "tcg-op.h"
/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif
/* Note that this is optimized for sequential allocation during translate.
   Up to and including filling in the forward link immediately.  We'll do
   proper termination of the end of the list after we finish translation.  */

static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;
    int ni = oi + 1;
    int pi = oi - 1;

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_last_op_idx = oi;
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}
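
/* For illustration: the first op emitted lands at index 0 with
   .prev = -1 and .next = 1, the second at index 1 with .prev = 0 and
   .next = 2, and so on.  Each op's forward link therefore points one
   past the current end of the buffer until translation completes and
   the tail of the list is terminated properly.  */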

void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 1;
    ctx->gen_opparam_buf[pi] = a1;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 2;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 3;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 4;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 5;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
                 TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 6;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;
    ctx->gen_opparam_buf[pi + 5] = a6;

    tcg_emit_op(ctx, opc, pi);
}

/* 32 bit ops */

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg1);
        tcg_gen_sub_i32(ret, t0, arg2);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, l);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_setcond_i32(cond, ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_mul_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}

void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    /* 64-bit multiply from 32-bit pieces: the low x low product fills
       all 64 bits, the two cross products can only affect the high
       word, and the high x high term lies entirely above bit 63.  */
    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg1);
        tcg_gen_sub_i64(ret, t0, arg2);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, l);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

/* Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 56);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}

void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}

void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Size changing operations.  */

void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    }
}

void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}

void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}

void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}

void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

/* QEMU specific operations.  */

void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}

static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
{
    switch (op & MO_SIZE) {
    case MO_8:
        op &= ~MO_BSWAP;
        break;
    case MO_16:
        break;
    case MO_32:
        if (!is64) {
            op &= ~MO_SIGN;
        }
        break;
    case MO_64:
        if (!is64) {
            tcg_abort();
        }
        break;
    }
    if (st) {
        op &= ~MO_SIGN;
    }
    return op;
}

static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
    }
#endif
}

static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}

void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);
    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
}

void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 1);
    gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
}

void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 0);
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
}

void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 1);
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
}