/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - n)) >> (31 - n + m)) << m)
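
/*
 * Illustrative note (not in the original source): for example,
 * BITS(15, 8) expands to ((0xffffffffU << 16) >> 24) << 8 == 0x0000ff00,
 * i.e. a mask covering bits 15 down to 8 inclusive.
 */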

static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
    switch (op) {
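    /*
     * Note added for clarity (not in the original source): the C_Oo_Ii(...)
     * macros name a constraint set with <o> output and <i> input operands;
     * each 'r' means "any register".  C_O1_I2(r, r, r) is therefore one
     * register output and two register inputs.
     */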
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        return C_O1_I2(r, 0, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        return C_O2_I4(r, r, r, r, r, r);
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
    case INDEX_op_mulu2_i32:
        return C_O2_I2(r, r, r, r);
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);
#endif

    case INDEX_op_qemu_ld_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O1_I1(r, r)
                : C_O1_I2(r, r, r));
    case INDEX_op_qemu_ld_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O2_I1(r, r, r)
                : C_O2_I2(r, r, r, r));
    case INDEX_op_qemu_st_i32:
        return (TARGET_LONG_BITS <= TCG_TARGET_REG_BITS
                ? C_O0_I2(r, r)
                : C_O0_I3(r, r, r));
    case INDEX_op_qemu_st_i64:
        return (TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r)
                : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? C_O0_I3(r, r, r)
                : C_O0_I4(r, r, r, r));

    default:
        g_assert_not_reached();
    }
}

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
    TCG_REG_R4,
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1,
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
};
#endif

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    tcg_debug_assert(type == sizeof(tcg_target_long));
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
    return true;
}

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);    /* Placeholder for the instruction size byte. */
}
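
/*
 * Added note (not in the original source): every TCI instruction is thus
 * encoded as <opcode byte> <size byte> <operands...>; the size byte is
 * written as 0 here and patched afterwards via
 * "old_code_ptr[1] = s->code_ptr - old_code_ptr" in the emitters below.
 */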

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write label. */
static void tci_out_label(TCGContext *s, TCGLabel *label)
{
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        tcg_debug_assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}
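
/*
 * Added note (not in the original source): for a forward branch the label
 * value is unknown at emit time, so tci_out_label() records a relocation
 * and skips a native-word-sized slot; patch_reloc() above later writes the
 * resolved label address into that slot.
 */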

static void stack_bounds_check(TCGReg base, target_long offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset < 0);
        tcg_debug_assert(offset >= -(CPU_TEMP_BUF_NLONGS * sizeof(long)));
    }
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    stack_bounds_check(arg1, arg2);
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_debug_assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_debug_assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_tci_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_tci_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline void tcg_out_call(TCGContext *s, const tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_i(s, (uintptr_t)arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_insn_offset) {
            /* Direct jump method. */
            /* Align for atomic patching and thread safety. */
            s->code_ptr = QEMU_ALIGN_PTR_UP(s->code_ptr, 4);
            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        set_jmp_reset_offset(s, args[0]);
        break;
    case INDEX_op_br:
        tci_out_label(s, arg_label(args[0]));
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        stack_bounds_check(args[1], args[2]);
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_debug_assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:     /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:     /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:   /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:   /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:  /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:  /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:  /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:  /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:     /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:     /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:   /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:  /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:   /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:  /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:    /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:    /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
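    /*
     * Added note (not in the original source): the qemu_ld/st cases below
     * have a variable operand count.  The guest address occupies one host
     * register, or two when TARGET_LONG_BITS > TCG_TARGET_REG_BITS, and a
     * 64-bit data value likewise occupies two registers on a 32-bit host.
     */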
    case INDEX_op_qemu_ld_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_mb:
        break;
    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;

    stack_bounds_check(arg1, arg2);
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* TODO: Which registers should be set here? */
    tcg_target_call_clobber_regs = BIT(TCG_TARGET_NB_REGS) - 1;

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments. */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}
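
/*
 * Added note (not in the original source): given the frame registered in
 * tcg_target_init(), spilled TCG temporaries live in the range
 * [sp - CPU_TEMP_BUF_NLONGS * sizeof(long), sp), which is exactly the range
 * that stack_bounds_check() accepts for sp-relative loads and stores.
 */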

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}