tcg/tcg-op.c
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "tcg.h"
#include "tcg-op.h"

/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif

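/* Added note: the link-error names are deliberate.  On a 64-bit host the
   halves of a TCGv_i64 are never split, so any TCGV_LOW/TCGV_HIGH use that
   survives constant folding is a bug; mapping the macros to declared but
   never-defined functions turns such a use into a link failure instead of
   a silent miscompile.  */
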
/* Note that this is optimized for sequential allocation during translate.
   Up to and including filling in the forward link immediately.  We'll do
   proper termination of the end of the list after we finish translation.  */

static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;
    int ni = oi + 1;
    int pi = oi - 1;

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_last_op_idx = oi;
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}

void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 1;
    ctx->gen_opparam_buf[pi] = a1;

    tcg_emit_op(ctx, opc, pi);
}

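/* For illustration: ops and their operands live in two parallel buffers.
   A two-operand op, say, appends its operands to gen_opparam_buf and the
   TCGOp records only their starting index, so every gen_op_buf entry stays
   fixed size:

       gen_opparam_buf: ... [pi] = a1, [pi + 1] = a2, ...
       gen_op_buf[oi]:  { .opc = opc, .args = pi, .prev = oi-1, .next = oi+1 }
*/
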
void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 2;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 3;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 4;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
                 TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 5;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;

    tcg_emit_op(ctx, opc, pi);
}

void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
                 TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
{
    int pi = ctx->gen_next_parm_idx;

    tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
    ctx->gen_next_parm_idx = pi + 6;
    ctx->gen_opparam_buf[pi + 0] = a1;
    ctx->gen_opparam_buf[pi + 1] = a2;
    ctx->gen_opparam_buf[pi + 2] = a3;
    ctx->gen_opparam_buf[pi + 3] = a4;
    ctx->gen_opparam_buf[pi + 4] = a5;
    ctx->gen_opparam_buf[pi + 5] = a6;

    tcg_emit_op(ctx, opc, pi);
}

/* 32 bit ops */

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg1);
        tcg_gen_sub_i32(ret, t0, arg2);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, int label)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label);
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, int label)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_brcond_i32(cond, arg1, t0, label);
    tcg_temp_free_i32(t0);
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_setcond_i32(cond, ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_mul_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

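/* Added note: INDEX_op_div2_i32 divides a double-width dividend passed in
   two registers.  The signed form above seeds the high half with the sign
   extension of arg1 (the sari by 31); the unsigned forms below seed it
   with zero instead.  */
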
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

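/* Added note: the fallback relies on the identity
   rotl(x, n) == (x << n) | (x >> (32 - n)), computing the 32 - n shift
   count at runtime with tcg_gen_subfi_i32.  */
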
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}

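/* Worked example (illustrative): tcg_gen_deposit_i32(ret, a, b, 8, 8)
   takes the fallback path as ret = (a & ~0x0000ff00) | ((b & 0xff) << 8),
   i.e. byte 1 of the result comes from the low byte of b and everything
   else from a.  */
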
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

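/* Added note: the fallback turns the comparison into a select mask:

       t0  = -(c1 cond c2)            all ones if true, all zeros if not
       ret = (v1 & t0) | (v2 & ~t0)

   which yields v1 when the condition holds and v2 otherwise.  */
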
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

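/* Added derivation for the negative-input adjustment: writing neg(x) for
   1-if-negative, the signed value of a 32-bit a is a - 2^32 * neg(a), so

       a_s * b_s = a*b - 2^32 * (neg(a)*b + neg(b)*a)   (mod 2^64)

   i.e. the signed high word is the unsigned high word minus b when a < 0
   and minus a when b < 0; t2 and t3 above compute exactly those terms.  */
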
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

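/* Added trace of the fallback, writing arg byte-wise as AABBCCDD:

       t0  = DD000000                     arg << 24
       t0 |= 00CC0000                     (arg & 0x0000ff00) << 8
       t0 |= 0000BB00                     (arg >> 8) & 0x0000ff00
       ret = t0 | 000000AA                arg >> 24
*/
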
/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef TCG_TARGET_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef TCG_TARGET_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}

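/* Added note: on a 32-bit host the product is assembled from 32-bit
   pieces.  With arg1 = ah:al and arg2 = bh:bl,

       arg1 * arg2 = al*bl + 2^32 * (al*bh + ah*bl)   (mod 2^64)

   mulu2 supplies the full 64-bit al*bl; the two cross products affect only
   the high word, so plain 32-bit multiplies suffice for them.  */
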
#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg1);
        tcg_gen_sub_i64(ret, t0, arg2);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

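/* Added example: a logical right shift by 40 takes the c >= 32 path with
   c = 8, giving low(ret) = high(arg1) >> 8 and high(ret) = 0.  A shift by
   8 takes the cross-word path, where the 32 - c bits falling out of one
   word are ORed into the other.  */
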
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, int label)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label);
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond, label);
        }
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, int label)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, label);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

/* Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 56);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}

void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}

void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

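/* Added note: without add2 support, the carry out of the low-word addition
   is recovered as (al + bl) < al, unsigned -- the TCG_COND_LTU setcond
   above.  The matching borrow for sub2 below is al < bl.  */
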
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Size changing operations.  */

void tcg_gen_trunc_shr_i64_i32(TCGv_i32 ret, TCGv_i64 arg, unsigned count)
{
    tcg_debug_assert(count < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        if (count >= 32) {
            tcg_gen_shri_i32(ret, TCGV_HIGH(arg), count - 32);
        } else if (count == 0) {
            tcg_gen_mov_i32(ret, TCGV_LOW(arg));
        } else {
            TCGv_i64 t = tcg_temp_new_i64();
            tcg_gen_shri_i64(t, arg, count);
            tcg_gen_mov_i32(ret, TCGV_LOW(t));
            tcg_temp_free_i64(t);
        }
    } else if (TCG_TARGET_HAS_trunc_shr_i32) {
        tcg_gen_op3i_i32(INDEX_op_trunc_shr_i32, ret,
                         MAKE_TCGV_I32(GET_TCGV_I64(arg)), count);
    } else if (count == 0) {
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    } else {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, count);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}

void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        /* Note: we assume the target supports move between
           32 and 64 bit registers.  */
        tcg_gen_ext32u_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        /* Note: we assume the target supports move between
           32 and 64 bit registers.  */
        tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
    }
}

void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}

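/* Added example: tcg_gen_concat_i32_i64(d, lo, hi) computes
   d = ((uint64_t)hi << 32) | (uint32_t)lo, either with a single deposit
   into bits [63:32] or with the explicit shift-and-or fallback above.  */
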
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_trunc_shr_i64_i32(lo, arg, 0);
        tcg_gen_trunc_shr_i64_i32(hi, arg, 32);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

/* QEMU specific operations.  */

void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}

static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
{
    switch (op & MO_SIZE) {
    case MO_8:
        op &= ~MO_BSWAP;
        break;
    case MO_16:
        break;
    case MO_32:
        if (!is64) {
            op &= ~MO_SIGN;
        }
        break;
    case MO_64:
        if (!is64) {
            tcg_abort();
        }
        break;
    }
    if (st) {
        op &= ~MO_SIGN;
    }
    return op;
}

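/* Added examples: a byte access has no endianness, so MO_BSWAP is dropped
   for MO_8; a sign flag is meaningless once the value fills its destination
   (a 32-bit load into a 32-bit temp) or when storing, so MO_SIGN is cleared
   in those cases.  */
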
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
#if TARGET_LONG_BITS == 32
    tcg_gen_op4ii_i32(opc, val, addr, memop, idx);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op5ii_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr),
                          memop, idx);
    } else {
        tcg_gen_op4(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr),
                    memop, idx);
    }
#endif
}

static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op5ii_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                          addr, memop, idx);
    } else {
        tcg_gen_op4(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr),
                    memop, idx);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op6ii_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                          TCGV_LOW(addr), TCGV_HIGH(addr), memop, idx);
    } else {
        tcg_gen_op4ii_i64(opc, val, addr, memop, idx);
    }
#endif
}

void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);
    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
}

void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 1);
    gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
}

void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
        if (memop & MO_SIGN) {
            tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(val), 0);
        }
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 0);
    gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
}

void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
        tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
        return;
    }

    memop = tcg_canonicalize_memop(memop, 1, 1);
    gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
}

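/* Added usage sketch: a frontend would typically emit, e.g., a little-endian
   32-bit guest load as

       tcg_gen_qemu_ld_i32(val, addr, mem_index, MO_LEUL);

   where mem_index selects the softmmu TLB to use and MO_LEUL combines
   MO_LE | MO_UL from the TCGMemOp flags canonicalized above.  */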