/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "qemu/osdep.h"
#include "qemu-common.h"
#include "cpu.h"
#include "exec/exec-all.h"
#include "tcg.h"
#include "tcg-op.h"
#include "trace-tcg.h"
#include "trace/mem.h"
/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif
/* Note that this is optimized for sequential allocation during translate.
   Up to and including filling in the forward link immediately.  We'll do
   proper termination of the end of the list after we finish translation.  */

static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;
    int ni = oi + 1;
    int pi = oi - 1;

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_op_buf[0].prev = oi;
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}
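/* The tcg_gen_opN helpers below all follow the same pattern: reserve N
   slots in the parameter buffer, store the arguments there, then emit
   the opcode pointing at them.  */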
void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 1;
    ctx->gen_opparam_buf[pi] = a1;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 2;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 3;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 4;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 5;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
                 TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 6;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;
    ctx->gen_opparam_buf[pi + 5] = a6;

    tcg_emit_op(ctx, opc, pi);
}
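/* A barrier is only emitted when translating for a parallel
   (multi-threaded) configuration; with serialized guest execution
   there is no other vCPU to order against.  */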
void tcg_gen_mb(TCGBar mb_type)
{
    if (parallel_cpus) {
        tcg_gen_op1(&tcg_ctx, INDEX_op_mb, mb_type);
    }
}
/* 32 bit ops */

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
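/* Reverse subtract: computes "arg1 - arg2" with the constant as the
   left operand; the arg1 == 0 case reduces to negation.  */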
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg1);
        tcg_gen_sub_i32(ret, t0, arg2);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, l);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_setcond_i32(cond, ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_mul_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
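/* Division expands in one of three ways, depending on what the backend
   provides: a native div opcode, a div2 opcode taking the dividend as
   a high:low pair (the high part seeded with the sign or with zero),
   or an out-of-line helper.  */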
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
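/* Without a rotate opcode, use the identity
   rotl(x, n) == (x << n) | (x >> (32 - n)).  */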
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}
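/* Deposit LEN bits of ARG2, starting at bit OFS, into ARG1: the
   fallback masks the field out of ARG1 and ORs in the shifted ARG2.  */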
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}
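/* Without a movcond opcode, select through an all-ones/all-zeros mask:
   ret = (v1 & -cond) | (v2 & ~(-cond)).  */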
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
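/* Double-width multiplies.  The signed flavour can be derived from the
   unsigned product by subtracting (arg1 < 0 ? arg2 : 0) and
   (arg2 < 0 ? arg1 : 0) from the high half.  */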
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
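/* Signed-by-unsigned multiply: only the signed arg1 needs the
   high-half correction.  */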
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */
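/* On a 32-bit host a TCGv_i64 is backed by a pair of i32 temps,
   accessed via TCGV_LOW and TCGV_HIGH; the functions below open-code
   the 64-bit operations on that pair.  */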
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
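/* 64x64->64 multiply from 32-bit pieces: the full low product comes
   from mulu2 of the low halves; the cross products al*bh and ah*bl are
   then added into the high half (carries past bit 63 are discarded).  */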
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
#endif /* TCG_TARGET_REG_BITS == 32 */
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg1);
        tcg_gen_sub_i64(ret, t0, arg2);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
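/* Shift a 64-bit value held in two 32-bit halves by a constant.  A
   shift by 32 or more moves one half into the other; a smaller shift
   combines bits from both halves.  */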
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, l);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
/* Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 56);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}

void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
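/* Without an add2 opcode, the carry out of the low-half addition is
   recovered as the unsigned comparison (al + bl) < al.  */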
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
/* Size changing operations.  */

void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    }
}

void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}

void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}

void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}

void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
/* QEMU specific operations.  */

void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
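/* Normalize a memory operation: byte accesses never need a byte swap,
   a 32-bit access into a 32-bit value carries no meaningful sign flag,
   and stores never carry a sign at all.  */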
static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
{
    /* Trigger the asserts within as early as possible.  */
    (void)get_alignment_bits(op);

    switch (op & MO_SIZE) {
    case MO_8:
        op &= ~MO_BSWAP;
        break;
    case MO_16:
        break;
    case MO_32:
        if (!is64) {
            op &= ~MO_SIGN;
        }
        break;
    case MO_64:
        if (!is64) {
            tcg_abort();
        }
        break;
    }
    if (st) {
        op &= ~MO_SIGN;
    }
    return op;
}
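/* Emit a guest load/store opcode.  The number of opcode arguments
   depends on whether the guest address and the value each fit in one
   host register or need a low/high pair.  */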
1930 static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
1931 TCGMemOp memop, TCGArg idx)
1933 TCGMemOpIdx oi = make_memop_idx(memop, idx);
1934 #if TARGET_LONG_BITS == 32
1935 tcg_gen_op3i_i32(opc, val, addr, oi);
1936 #else
1937 if (TCG_TARGET_REG_BITS == 32) {
1938 tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
1939 } else {
1940 tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
1942 #endif
1945 static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
1946 TCGMemOp memop, TCGArg idx)
1948 TCGMemOpIdx oi = make_memop_idx(memop, idx);
1949 #if TARGET_LONG_BITS == 32
1950 if (TCG_TARGET_REG_BITS == 32) {
1951 tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
1952 } else {
1953 tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
1955 #else
1956 if (TCG_TARGET_REG_BITS == 32) {
1957 tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
1958 TCGV_LOW(addr), TCGV_HIGH(addr), oi);
1959 } else {
1960 tcg_gen_op3i_i64(opc, val, addr, oi);
1962 #endif
1965 void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1967 memop = tcg_canonicalize_memop(memop, 0, 0);
1968 trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
1969 addr, trace_mem_get_info(memop, 0));
1970 gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
1973 void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1975 memop = tcg_canonicalize_memop(memop, 0, 1);
1976 trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
1977 addr, trace_mem_get_info(memop, 1));
1978 gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 0);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
}

void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 1);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
}

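/*
 * Illustrative front-end call (val64, addr and mmu_idx are
 * hypothetical names): loading a signed, target-endian 32-bit word and
 * widening it to 64 bits is
 *
 *     tcg_gen_qemu_ld_i64(val64, addr, mmu_idx, MO_TESL);
 *
 * On a 32-bit host this takes the early-out path above: a 32-bit load
 * into TCGV_LOW(val64) followed by an explicit sign extension into
 * TCGV_HIGH(val64).
 */
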
static void tcg_gen_ext_i32(TCGv_i32 ret, TCGv_i32 val, TCGMemOp opc)
{
    switch (opc & MO_SSIZE) {
    case MO_SB:
        tcg_gen_ext8s_i32(ret, val);
        break;
    case MO_UB:
        tcg_gen_ext8u_i32(ret, val);
        break;
    case MO_SW:
        tcg_gen_ext16s_i32(ret, val);
        break;
    case MO_UW:
        tcg_gen_ext16u_i32(ret, val);
        break;
    default:
        tcg_gen_mov_i32(ret, val);
        break;
    }
}

static void tcg_gen_ext_i64(TCGv_i64 ret, TCGv_i64 val, TCGMemOp opc)
{
    switch (opc & MO_SSIZE) {
    case MO_SB:
        tcg_gen_ext8s_i64(ret, val);
        break;
    case MO_UB:
        tcg_gen_ext8u_i64(ret, val);
        break;
    case MO_SW:
        tcg_gen_ext16s_i64(ret, val);
        break;
    case MO_UW:
        tcg_gen_ext16u_i64(ret, val);
        break;
    case MO_SL:
        tcg_gen_ext32s_i64(ret, val);
        break;
    case MO_UL:
        tcg_gen_ext32u_i64(ret, val);
        break;
    default:
        tcg_gen_mov_i64(ret, val);
        break;
    }
}

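/*
 * The two switches above reproduce, as explicit TCG ops, the sign or
 * zero extension implied by a memop's MO_SSIZE bits; the atomic paths
 * below use them to honour MO_SIGN on values that were loaded or
 * returned without it.
 */
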
#ifdef CONFIG_SOFTMMU
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i64, TCGv_i32);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i32);
#else
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64, TCGv_i64);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64);
#endif

#ifdef CONFIG_ATOMIC64
# define WITH_ATOMIC64(X) X,
#else
# define WITH_ATOMIC64(X)
#endif

static void * const table_cmpxchg[16] = {
    [MO_8] = gen_helper_atomic_cmpxchgb,
    [MO_16 | MO_LE] = gen_helper_atomic_cmpxchgw_le,
    [MO_16 | MO_BE] = gen_helper_atomic_cmpxchgw_be,
    [MO_32 | MO_LE] = gen_helper_atomic_cmpxchgl_le,
    [MO_32 | MO_BE] = gen_helper_atomic_cmpxchgl_be,
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_cmpxchgq_le)
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_cmpxchgq_be)
};

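/*
 * The helper tables are indexed by the MO_SIZE and MO_BSWAP bits of
 * the memop, hence 16 slots; size/endianness combinations without a
 * helper (including MO_64 when CONFIG_ATOMIC64 is absent) stay NULL,
 * which the lookups below catch with tcg_debug_assert.
 */
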
void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
                                TCGv_i32 newv, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);

    if (!parallel_cpus) {
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();

        tcg_gen_ext_i32(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
        tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i32(t2, addr, idx, memop);
        tcg_temp_free_i32(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, t1, memop);
        } else {
            tcg_gen_mov_i32(retv, t1);
        }
        tcg_temp_free_i32(t1);
    } else {
        gen_atomic_cx_i32 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv);
#endif

        if (memop & MO_SIGN) {
            tcg_gen_ext_i32(retv, retv, memop);
        }
    }
}

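/*
 * Note that the !parallel_cpus path above is a plain load/compare/
 * store sequence; it is only correct because no other CPU can run
 * concurrently.  An illustrative call, with a hypothetical mmu_idx,
 * for a little-endian 16-bit compare-and-swap:
 *
 *     tcg_gen_atomic_cmpxchg_i32(old, addr, expect, desired,
 *                                mmu_idx, MO_LEUW);
 */
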
void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
                                TCGv_i64 newv, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if (!parallel_cpus) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();

        tcg_gen_ext_i64(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
        tcg_gen_movcond_i64(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i64(t2, addr, idx, memop);
        tcg_temp_free_i64(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, t1, memop);
        } else {
            tcg_gen_mov_i64(retv, t1);
        }
        tcg_temp_free_i64(t1);
    } else if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_cx_i64 gen;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop, idx));
            gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(retv, tcg_ctx.tcg_env, addr, cmpv, newv);
#endif
#else
        gen_helper_exit_atomic(tcg_ctx.tcg_env);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 c32 = tcg_temp_new_i32();
        TCGv_i32 n32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(c32, cmpv);
        tcg_gen_extrl_i64_i32(n32, newv);
        tcg_gen_atomic_cmpxchg_i32(r32, addr, c32, n32, idx, memop & ~MO_SIGN);
        tcg_temp_free_i32(c32);
        tcg_temp_free_i32(n32);

        tcg_gen_extu_i32_i64(retv, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, retv, memop);
        }
    }
}

static void do_nonatomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                                TCGArg idx, TCGMemOp memop, bool new_val,
                                void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();

    memop = tcg_canonicalize_memop(memop, 0, 0);

    tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
    gen(t2, t1, val);
    tcg_gen_qemu_st_i32(t2, addr, idx, memop);

    tcg_gen_ext_i32(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                             TCGArg idx, TCGMemOp memop, void * const table[])
{
    gen_atomic_op_i32 gen;

    memop = tcg_canonicalize_memop(memop, 0, 0);

    gen = table[memop & (MO_SIZE | MO_BSWAP)];
    tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
    {
        TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
        gen(ret, tcg_ctx.tcg_env, addr, val, oi);
        tcg_temp_free_i32(oi);
    }
#else
    gen(ret, tcg_ctx.tcg_env, addr, val);
#endif

    if (memop & MO_SIGN) {
        tcg_gen_ext_i32(ret, ret, memop);
    }
}

static void do_nonatomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                                TCGArg idx, TCGMemOp memop, bool new_val,
                                void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();

    memop = tcg_canonicalize_memop(memop, 1, 0);

    tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
    gen(t2, t1, val);
    tcg_gen_qemu_st_i64(t2, addr, idx, memop);

    tcg_gen_ext_i64(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                             TCGArg idx, TCGMemOp memop, void * const table[])
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_op_i64 gen;

        gen = table[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
            gen(ret, tcg_ctx.tcg_env, addr, val, oi);
            tcg_temp_free_i32(oi);
        }
#else
        gen(ret, tcg_ctx.tcg_env, addr, val);
#endif
#else
        gen_helper_exit_atomic(tcg_ctx.tcg_env);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 v32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(v32, val);
        do_atomic_op_i32(r32, addr, v32, idx, memop & ~MO_SIGN, table);
        tcg_temp_free_i32(v32);

        tcg_gen_extu_i32_i64(ret, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(ret, ret, memop);
        }
    }
}

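/*
 * For sub-64-bit sizes do_atomic_op_i64 narrows val to 32 bits, runs
 * the i32 path, and zero-extends the result, re-applying MO_SIGN
 * afterwards; only genuine 64-bit operations need CONFIG_ATOMIC64 or
 * fall back to gen_helper_exit_atomic.
 */
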
#define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
static void * const table_##NAME[16] = {                                \
    [MO_8] = gen_helper_atomic_##NAME##b,                               \
    [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
    [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
    [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
    [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
};                                                                      \
void tcg_gen_atomic_##NAME##_i32                                        \
    (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (parallel_cpus) {                                                \
        do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i32);                        \
    }                                                                   \
}                                                                       \
void tcg_gen_atomic_##NAME##_i64                                        \
    (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (parallel_cpus) {                                                \
        do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i64);                        \
    }                                                                   \
}

GEN_ATOMIC_HELPER(fetch_add, add, 0)
GEN_ATOMIC_HELPER(fetch_and, and, 0)
GEN_ATOMIC_HELPER(fetch_or, or, 0)
GEN_ATOMIC_HELPER(fetch_xor, xor, 0)

GEN_ATOMIC_HELPER(add_fetch, add, 1)
GEN_ATOMIC_HELPER(and_fetch, and, 1)
GEN_ATOMIC_HELPER(or_fetch, or, 1)
GEN_ATOMIC_HELPER(xor_fetch, xor, 1)

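/*
 * Each expansion above defines one helper table and a pair of public
 * entry points; e.g. GEN_ATOMIC_HELPER(fetch_add, add, 0) produces
 * tcg_gen_atomic_fetch_add_i32 and tcg_gen_atomic_fetch_add_i64.  The
 * NEW argument only affects the non-parallel fallback, selecting
 * whether it returns the value before (0) or after (1) the operation,
 * to match the fetch_op/op_fetch helper semantics.
 */
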
static void tcg_gen_mov2_i32(TCGv_i32 r, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mov_i32(r, b);
}

static void tcg_gen_mov2_i64(TCGv_i64 r, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mov_i64(r, b);
}

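/*
 * The mov2 wrappers deliberately ignore their middle operand: with
 * "op(old, val) = val", the generic read-modify-write machinery
 * degenerates into an exchange that returns the old value.
 */
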
GEN_ATOMIC_HELPER(xchg, mov2, 0)

#undef GEN_ATOMIC_HELPER