/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"
#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif
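
/*
 * Current position in the bytecode, exported so that helpers called from
 * the interpreter can recover the call site (GETPC()) for unwinding.
 * Thread-local because each vCPU thread runs its own interpreter loop.
 */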
__thread uintptr_t tci_tb_ptr;

static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif

static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}

static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}

static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
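
/*
 * Bytecode operand decoding.  Each TCI instruction starts with an opcode
 * byte and a total-length byte; register operands follow as single bytes
 * (register numbers), while constants are stored inline in host byte
 * order and advance tb_ptr by their own size.
 */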

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value = *(const uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value = *(const int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(const uint8_t **tb_ptr)
{
    uint64_t value = *(const uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}
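
/*
 * Branch targets are stored in the bytecode as absolute addresses within
 * the bytecode buffer; 0 marks a label that was never resolved, hence
 * the assertion below.
 */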
static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        g_assert_not_reached();
    }
    return result;
}
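
/*
 * Guest memory access macros.  With softmmu the access goes through the
 * TLB helpers, passing the memop/mmu-index pair oi and the current
 * bytecode position as return address; in user mode, g2h() converts the
 * guest address directly into a host pointer.
 */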
#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
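
/*
 * CASE_32_64 expands to the 32-bit case label, plus the matching 64-bit
 * one on 64-bit hosts, for opcodes whose implementation is shared between
 * both widths; CASE_64 expands to nothing on 32-bit hosts.
 */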
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

/* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint8_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
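    /*
     * tcg_temps provides a small private stack for the interpreter:
     * TCG_REG_CALL_STACK starts at the end of the buffer, and the TCG
     * temporary buffer is addressed at negative offsets below it.
     */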
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        const uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
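        /*
         * Helper calls use one fixed prototype: six tcg_target_ulong
         * arguments taken from R0..R5 (or six 32-bit register pairs,
         * R0..R11, on 32-bit hosts), with the 64-bit result written back
         * to R0 (and R1 for the high half on 32-bit hosts).
         */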
        case INDEX_op_call:
            t0 = tci_read_i(&tb_ptr);
            tci_tb_ptr = (uintptr_t)tb_ptr;
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R4),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R4),
                                          tci_read_reg(regs, TCG_REG_R5));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_r64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
        case INDEX_op_tci_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;

        /* Load/store operations (32 bit). */

        CASE_32_64(ld8u)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        CASE_32_64(ld8s)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(int8_t *)(t1 + t2));
            break;
        CASE_32_64(ld16u)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint16_t *)(t1 + t2));
            break;
        CASE_32_64(ld16s)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(int16_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i32:
        CASE_64(ld32u)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        CASE_32_64(st8)
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        CASE_32_64(st16)
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
        CASE_64(st32)
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;

        /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 * t2);
            break;
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 % t2);
            break;
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 ^ t2);
            break;

        /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
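        /*
         * deposit writes t2 into a bit field of t1: the two immediate
         * bytes give the field position and length, from which the mask
         * below is built.
         */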
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r32(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_r64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp64 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
        case INDEX_op_tci_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;

        /* Load/store operations (64 bit). */

        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint64_t *)(t1 + t2) = t0;
            break;

        /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 * t2);
            break;
        case INDEX_op_div_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int64_t)t1 / (int64_t)t2);
            break;
        case INDEX_op_divu_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint64_t)t1 / (uint64_t)t2);
            break;
        case INDEX_op_rem_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int64_t)t1 % (int64_t)t2);
            break;
        case INDEX_op_remu_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint64_t)t1 % (uint64_t)t2);
            break;
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 ^ t2);
            break;

        /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

        /* QEMU specific operations. */

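        /*
         * exit_tb returns a 64-bit immediate to the caller of
         * tcg_qemu_tb_exec(); the value identifies which TB exit was taken.
         */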
        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
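        /*
         * goto_tb reads a 32-bit displacement that TB chaining may patch
         * concurrently (hence qatomic_read); a zero displacement simply
         * falls through to the following opcode.
         */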
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = qatomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
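        /*
         * For guest loads and stores, the bytecode carries a TCGMemOpIdx
         * immediate; get_memop() extracts size, signedness and endianness
         * from it to select the matching access macro.
         */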
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
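        /*
         * A 64-bit load needs two destination registers on a 32-bit host:
         * t0 receives the low half and t1 the high half of the result.
         */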
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}