/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"            /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif
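
/* Note: on a 32-bit host every one of the MAX_OPC_PARAM_IARGS (6) input
 * arguments may carry a 64-bit value and thus occupy two 32-bit slots,
 * which is why the 32-bit variant above takes twelve parameters.
 * Illustrative example (not a real helper from this file): a call taking
 * (env, uint64_t) would use one slot for env and two for the value. */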

static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif

static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}

static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}

static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}
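
/* TCG_AREG0 holds the CPUArchState pointer and TCG_REG_CALL_STACK the
 * stack pointer for helper calls; both are set up once per execution in
 * tcg_qemu_tb_exec() below, so tci_write_reg() asserts that generated
 * code never overwrites them. */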

#if TCG_TARGET_REG_BITS == 64
static void
tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg16(tcg_target_ulong *regs, TCGReg index, uint16_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
{
    tci_write_reg(regs, index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void
tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
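
/* Worked example: tci_uint64(0x00000001, 0x00000002) yields
 * 0x0000000100000002 - the high word is shifted into the upper 32 bits
 * and the low word fills the lower 32 bits. */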

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif
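
/* The readers above share one pattern: an immediate is stored inline in
 * the bytecode stream in host byte order, and *tb_ptr is advanced past
 * it.  Sketch of the stream for a 32-bit immediate (illustrative):
 *
 *     before: *tb_ptr -> [b0][b1][b2][b3][next operand ...]
 *     after:  value holds the 4 bytes, *tb_ptr -> [next operand ...]
 */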

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}
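
/* For a guest address wider than a host register (a 64-bit guest on a
 * 32-bit host), the address arrives as two consecutive register
 * operands: first the low half, then the high half, which is shifted
 * into the upper 32 bits above. */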

/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong
tci_read_ri(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(regs, r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(regs, r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(regs, tb_ptr);
    return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(regs, r);
    }
    return value;
}
#endif
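
/* The "ri" readers decode a "register or immediate" operand: one index
 * byte is read first, and the reserved index TCG_CONST means an inline
 * constant of the appropriate width follows instead of a register
 * number.  Illustrative operand layouts:
 *
 *     register operand: [reg]
 *     constant operand: [TCG_CONST][immediate bytes ...]
 */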

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
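
/* Both comparison helpers map a TCGCond onto the matching C operator,
 * using the signed reinterpretations (i0, i1) for LT/GE/LE/GT and the
 * unsigned operands (u0, u1) for EQ/NE and the *U variants. */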

#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
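
/* Two flavours of guest memory access: with CONFIG_SOFTMMU each access
 * goes through the TLB-aware helpers, passing the memop index 'oi' and
 * the current bytecode pointer as the return address for the slow path;
 * in user mode the guest address is translated with g2h() and accessed
 * directly through a host pointer. */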

/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;
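
        /* Every TCI instruction starts with a one-byte opcode and a
         * one-byte total size, followed by its operands; illustrative
         * layout:
         *
         *     [opc][size][operand bytes ...]
         *
         * The size byte lets debug builds assert that each case below
         * consumed exactly its operands. */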

        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_ri(regs, &tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11),
                                          tci_read_reg(regs, TCG_REG_R12));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
            TODO();
            break;
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;
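
            /* The assertion above guards stores based on the stack
             * pointer: sp_value points at the end of the tcg_temps
             * buffer, so only negative offsets, i.e. slots inside that
             * buffer, are considered legal. */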

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
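
            /* The deposit above builds a field mask from the offset
             * (tmp16) and length (tmp8) operands; for example, with
             * length 4 and offset 8 the mask is ((1 << 4) - 1) << 8 =
             * 0x00000f00, so bits 8..11 of the result come from t2 and
             * all other bits are kept from t1. */
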
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp64 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
            TODO();
            break;
        case INDEX_op_ld16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg16(regs, t0, *(uint16_t *)(t1 + t2));
            break;
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = atomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
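
            /* goto_tb reads a 4-byte-aligned 32-bit displacement that
             * is presumably patched in place when translation blocks
             * are chained; atomic_read() pairs with that patching, and
             * the alignment keeps the access atomic on the host. */
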
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
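
            /* The TCGMemOpIdx 'oi' encodes the memory operation;
             * get_memop() extracts the TCGMemOp, and masking with
             * MO_BSWAP | MO_SSIZE leaves size, signedness and byte
             * order, which together select the accessor macro and any
             * sign extension applied above. */
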
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}