/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"
/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)
#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif
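
/*
 * Note: on a 32-bit host each of the MAX_OPC_PARAM_IARGS (6) logical helper
 * arguments may be a 64-bit value occupying two tcg_target_ulong slots,
 * which is why the 32-bit variant of helper_function takes twelve parameters.
 */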
static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif

static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}

static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}

static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void
tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
{
    tci_write_reg(regs, index, value);
}

#if TCG_TARGET_REG_BITS == 64
static void
tci_write_reg16(tcg_target_ulong *regs, TCGReg index, uint16_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

static void
tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
{
    tci_write_reg(regs, index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void
tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
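
/*
 * On a 32-bit host a 64-bit TCG value lives in a pair of 32-bit registers,
 * low half first; tci_uint64() and the two-register tci_write_reg64()
 * above implement that pairing.
 */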
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value = *(const uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value = *(const int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(const uint8_t **tb_ptr)
{
    uint64_t value = *(const uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif
/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}
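
/*
 * When guest addresses are wider than host registers (e.g. a 64-bit guest
 * on a 32-bit host), tci_read_ulong() consumes a second register operand
 * that supplies the high 32 bits of the address.
 */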
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong
tci_read_ri(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(regs, r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(const tcg_target_ulong *regs,
                              const uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(regs, r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs,
                              const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(regs, tb_ptr);
    return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs,
                              const uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(regs, r);
    }
    return value;
}
#endif
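
/*
 * A register-or-constant operand is encoded as a single register byte; the
 * reserved value TCG_CONST indicates that an immediate of the appropriate
 * width follows inline in the bytecode stream instead.
 */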
static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
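
/*
 * Signed conditions compare through the int32_t/int64_t casts above;
 * unsigned conditions use the original unsigned operands directly.
 */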
#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
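
/*
 * With CONFIG_SOFTMMU the guest-memory accessors above go through the
 * softmmu TLB helpers; in user mode the guest address is translated
 * directly to a host pointer with g2h().
 */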
/* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint8_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);
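
    /*
     * Each bytecode instruction begins with a one-byte opcode followed by a
     * one-byte total length; in CONFIG_DEBUG_TCG builds the length is checked
     * against the bytes actually consumed (see the tci_assert calls below).
     */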
    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        const uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;
        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_ri(regs, &tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11),
                                          tci_read_reg(regs, TCG_REG_R12));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
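
        /*
         * Helper arguments are taken from the fixed registers read above and
         * the result is returned in R0 (R0/R1 on 32-bit hosts). R4 is
         * skipped, presumably because it is reserved (TCG_REG_CALL_STACK).
         */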
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
        case INDEX_op_tci_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
            TODO();
            break;
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;
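
        /*
         * The assertion above checks that stores relative to the stack
         * pointer use negative offsets only, i.e. stay inside the tcg_temps
         * buffer that sp_value points just past.
         */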

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
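
        /*
         * deposit inserts len (tmp8) bits of t2 at offset ofs (tmp16) into
         * t1: e.g. ofs = 8 and len = 4 give the mask 0xf00 above, so bits
         * 8..11 are replaced while all other bits of t1 are preserved.
         */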
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp64 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
        case INDEX_op_tci_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
            TODO();
            break;
        case INDEX_op_ld16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg16(regs, t0, *(uint16_t *)(t1 + t2));
            break;
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = qatomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
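
        /*
         * The 32-bit value read above is a relative displacement to the next
         * TB; it is read atomically because it may be patched concurrently
         * when translation blocks are chained.
         */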
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
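
        /*
         * oi is a TCGMemOpIdx: get_memop() extracts the MemOp (size, sign
         * and endianness), which selects the accessor used above.
         */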
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}