/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"
35 /* Marker for missing code. */
36 #define TODO() \
37 do { \
38 fprintf(stderr, "TODO %s:%u: %s()\n", \
39 __FILE__, __LINE__, __func__); \
40 tcg_abort(); \
41 } while (0)
43 #if MAX_OPC_PARAM_IARGS != 5
44 # error Fix needed, number of supported input arguments changed!
45 #endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif
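
/*
 * On 32-bit hosts, helper calls seem to pass each of the (up to)
 * MAX_OPC_PARAM_IARGS 64-bit arguments as two 32-bit register halves,
 * which would explain the ten tcg_target_ulong parameters above; compare
 * the INDEX_op_call handler in tcg_qemu_tb_exec() below, which reads ten
 * registers on 32-bit hosts and five on 64-bit hosts.
 */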

static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    tci_assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < ARRAY_SIZE(tci_reg));
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    tci_reg[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
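
/*
 * Bytecode operand readers.  As the helpers below suggest, an operand in
 * the TCI instruction stream is either a one-byte register index, an
 * immediate of the stated width, or (for the "ri" variants) a one-byte
 * register index that may be the pseudo register TCG_CONST, in which case
 * an immediate of the stated width follows.
 */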

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}

/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
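
/*
 * Each TCI instruction, as consumed below, starts with a one-byte opcode
 * (tb_ptr[0]) and a one-byte total size (tb_ptr[1]), followed by its
 * operands.  A rough sketch of the layout:
 *
 *   byte 0:  TCGOpcode
 *   byte 1:  size of the whole instruction in bytes
 *   byte 2+: operands (register indexes, immediates, label addresses)
 *
 * With CONFIG_DEBUG_TCG, tci_assert() checks that every handler consumed
 * exactly 'size' bytes before advancing to the next instruction.
 */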

/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    tci_reg[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = atomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}