[qemu/ar7.git] / tcg / tci.c
/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"            /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"
/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)
#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
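
/*
 * All helpers are called through one generic function pointer type.
 * On 32-bit hosts each of the MAX_OPC_PARAM_IARGS = 6 input arguments
 * may span two tcg_target_ulong words, hence the twelve parameters
 * below; on 64-bit hosts one word per argument suffices.
 */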
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif
static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif

static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}

static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}

static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif
static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void
tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg16(tcg_target_ulong *regs, TCGReg index, uint16_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
{
    tci_write_reg(regs, index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void
tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
{
    tci_write_reg(regs, index, value);
}
#endif
#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
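
/* For example, tci_uint64(0x12345678, 0x9abcdef0) yields 0x123456789abcdef0. */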
#endif
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif
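
/*
 * Note that the constant readers above access the bytecode stream with
 * plain pointer casts, so they rely on the host tolerating unaligned
 * loads (or on the bytecode generator keeping operands aligned).
 */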
/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif
/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}
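
/*
 * With a 64-bit guest on a 32-bit host (TARGET_LONG_BITS greater than
 * TCG_TARGET_REG_BITS), a guest address occupies two consecutive
 * register operands, low word first, combined above into one value.
 */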
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong
tci_read_ri(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(regs, r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(regs, r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(regs, tb_ptr);
    return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(regs, r);
    }
    return value;
}
#endif
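
/*
 * A register-or-constant operand is thus a single register byte; the
 * reserved index TCG_CONST instead announces an inline immediate that
 * follows directly in the bytecode stream.
 */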
static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
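
/*
 * For example, tci_compare32(0xffffffff, 0, TCG_COND_LT) is true
 * (signed: -1 < 0), while TCG_COND_LTU yields false for the same
 * operands.
 */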
#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
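
/*
 * With CONFIG_SOFTMMU, the qemu_ld_*/
/*qemu_st_* macros above go through
 * the MMU helpers, passing the TCGMemOpIdx oi and the current bytecode
 * position as the return address for exception unwinding; in user mode,
 * g2h() translates the guest address and host memory is accessed
 * directly.
 */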
/* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function.
 */
QEMU_DISABLE_CFI
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);
    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif
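
        /*
         * Each bytecode instruction starts with a one-byte opcode and a
         * one-byte total size in bytes (including these two bytes),
         * followed by its operands; debug builds use the size to assert
         * below that every operand has been consumed.
         */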
        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
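        /*
         * Call arguments are fetched from a fixed register sequence
         * (R0-R3, then R5 upward; R4 is skipped, presumably because it
         * is the call-stack register), and the 64-bit result goes back
         * to R0, with the high word in R1 on 32-bit hosts.
         */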
        case INDEX_op_call:
            t0 = tci_read_ri(regs, &tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11),
                                          tci_read_reg(regs, TCG_REG_R12));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;

        /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
            TODO();
            break;
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

        /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 ^ t2);
            break;

        /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
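            /*
             * tmp16 is the deposit offset and tmp8 the field length.
             * Example: tmp16 = 8, tmp8 = 4 gives mask tmp32 = 0xf00, so
             * bits 8..11 of t1 are replaced by the low 4 bits of t2.
             */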
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp64 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;

        /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
            TODO();
            break;
        case INDEX_op_ld16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg16(regs, t0, *(uint16_t *)(t1 + t2));
            break;
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

        /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 ^ t2);
            break;

        /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

        /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
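        /*
         * goto_tb reads a 4-byte-aligned 32-bit displacement that is
         * added to the current bytecode position; it starts out as 0
         * (fall through) and may be patched later when this TB gets
         * chained to its successor, hence the atomic read.
         */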
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = qatomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
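        /*
         * The qemu_ld/st opcodes carry a TCGMemOpIdx operand, which
         * packs the memory operation flags (size, sign, endianness)
         * together with the MMU index; get_memop() extracts the flags.
         */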
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}