/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include "qemu-common.h"
#include "exec/exec-all.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif
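
/* Note: MAX_OPC_PARAM_IARGS is 5, and on 32-bit hosts each helper
   argument may be a 64-bit value occupying two registers, which is
   presumably why the 32-bit variant above takes ten parameters. */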

/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
uintptr_t tci_tb_ptr;
#endif

static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    assert(index != TCG_REG_CALL_STACK);
    tci_reg[index] = value;
}

static void tci_write_reg8s(TCGReg index, int8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16s(TCGReg index, int16_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16(TCGReg index, uint16_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
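
/* The low half of a 64-bit value is encoded first in the bytecode
   (see tci_read_r64 below); for example, tci_uint64(0x00000001,
   0x00000002) yields 0x0000000100000002. */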

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif
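
/* Immediates are stored inline in the bytecode stream in host format;
   each reader above returns the value and advances *tb_ptr past it. */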

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}
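
/* In the "ri" readers below, a register index equal to TCG_CONST is a
   sentinel meaning that an inline constant follows in the bytecode
   instead of a register operand. */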

/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    assert(label != 0);
    return label;
}
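
/* A label operand is an absolute bytecode address; the branch opcodes
   below take it simply by assigning it to tb_ptr. */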

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t next_tb = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    tci_reg[TCG_REG_CALL_STACK] = sp_value;
    assert(tb_ptr);
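
    /* Bytecode format: each instruction starts with a one-byte opcode
       and a one-byte total size, followed by its operands (register
       indices, immediates, labels).  The size byte is only consulted
       by the NDEBUG-guarded assertions below, which check that every
       handler consumed exactly its operands. */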

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
#ifndef CONFIG_SOFTMMU
        tcg_target_ulong host_addr;
#endif
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
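
            /* Helper arguments are passed in the fixed registers R0..R3
               and R5..R10 (R4 is skipped, apparently because it serves
               as TCG_REG_CALL_STACK); the 64-bit result comes back in
               R0, or in the R0/R1 pair on 32-bit hosts. */
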
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;
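
            /* The assert above allows stores relative to the call stack
               pointer only at negative offsets: sp_value points just
               past the end of tcg_temps, so all temp-buffer slots lie
               below it. */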

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
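
        /* The deposit above builds a mask of tmp8 one-bits at bit offset
           tmp16; e.g. len = 8, ofs = 8 gives tmp32 == 0x0000ff00, so bits
           8..15 of t1 are replaced by the low 8 bits of t2. */
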
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
        case INDEX_op_exit_tb:
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
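
            /* Guest memory access: with CONFIG_SOFTMMU the load/store
               goes through the MMU helpers, reading the mmu_idx operand
               from the bytecode; in user mode the guest address is used
               directly as a host address offset by GUEST_BASE, with
               tswap* correcting for guest endianness. */
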
        case INDEX_op_qemu_ld8u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8(t0, tmp8);
            break;
        case INDEX_op_qemu_ld8s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8s(t0, tmp8);
            break;
        case INDEX_op_qemu_ld16u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16(t0, tmp16);
            break;
        case INDEX_op_qemu_ld16s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16s(t0, tmp16);
            break;
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_qemu_ld32u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld32s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32s(t0, tmp32);
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */
        case INDEX_op_qemu_ld32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld64:
            t0 = *tb_ptr++;
#if TCG_TARGET_REG_BITS == 32
            t1 = *tb_ptr++;
#endif
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg(t0, tmp64);
#if TCG_TARGET_REG_BITS == 32
            tci_write_reg(t1, tmp64 >> 32);
#endif
            break;
        case INDEX_op_qemu_st8:
            t0 = tci_read_r8(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stb_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            *(uint8_t *)(host_addr + GUEST_BASE) = t0;
#endif
            break;
        case INDEX_op_qemu_st16:
            t0 = tci_read_r16(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stw_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
#endif
            break;
        case INDEX_op_qemu_st32:
            t0 = tci_read_r32(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stl_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
#endif
            break;
        case INDEX_op_qemu_st64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stq_mmu(env, taddr, tmp64, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
#endif
            break;
        default:
            TODO();
            break;
        }
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}