/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif
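
/* Illustrative note: with MAX_OPC_PARAM_IARGS == 5, a helper takes at
 * most five input arguments.  On a 32-bit host a 64-bit argument
 * occupies two tcg_target_ulong words, so the widest call above needs
 * 5 * 2 = 10 words, while a 64-bit host passes each argument in a
 * single native word. */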

static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif

static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}

static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}

static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void
tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
{
    tci_write_reg(regs, index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void
tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
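
/* Example (illustrative): tci_uint64(0x00000001, 0xfffffffe) yields
 * 0x00000001fffffffeULL.  Readers such as tci_read_r64() below fetch
 * the low word first, then the high word, matching the operand order
 * in the bytecode stream. */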

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif
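
/* Rough sketch (for illustration only, not a format specification) of
 * the operand stream these readers consume: each op begins with an
 * opcode byte and a length byte, skipped in the main interpreter loop;
 * operands follow as one-byte register indexes and inline immediates.
 * A 32-bit load, for instance, decodes as
 *
 *     opcode, length, reg(dest), reg(base), int32(offset)
 */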

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}

/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong
tci_read_ri(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(regs, r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(regs, r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(regs, tb_ptr);
    return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(regs, r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}
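
/* A register-or-constant ("ri") operand starts with one byte: either a
 * plain register index, or the reserved index TCG_CONST followed by an
 * inline immediate of the operand's size.  A 32-bit example:
 *
 *     <reg>                       1 byte, value taken from regs[]
 *     <TCG_CONST> <4-byte imm>    5 bytes, value read from the stream
 */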

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
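
/* Note the sign sensitivity above: the same bit patterns compare
 * differently under signed and unsigned conditions, e.g.
 *     tci_compare32(0xffffffff, 1, TCG_COND_LT)  == true   (-1 < 1)
 *     tci_compare32(0xffffffff, 1, TCG_COND_LTU) == false  (UINT32_MAX < 1)
 */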

#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
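
/* Both variants expand inside tcg_qemu_tb_exec() below, where env,
 * taddr, oi and tb_ptr are in scope.  With CONFIG_SOFTMMU the access
 * goes through the TLB helpers; in user mode, g2h() converts the guest
 * address into a host pointer that is dereferenced directly. */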

/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_ri(regs, &tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;

        /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

        /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 ^ t2);
            break;

        /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
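
        /* Worked example for the deposit above: with tmp16 (position) == 8
         * and tmp8 (length) == 4, the mask is ((1 << 4) - 1) << 8 ==
         * 0x00000f00, so bits 8..11 of t1 are replaced by the low four
         * bits of t2. */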
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp64 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;

        /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

        /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 ^ t2);
            break;

        /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

        /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = atomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
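
        /* The 4-byte displacement read above is kept naturally aligned so
         * that TB chaining can patch it atomically (hence atomic_read());
         * a displacement of zero simply falls through to the next op. */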
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}