/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "qemu-common.h"
#include "qemu/bitops.h"
#include "tcg-target.h"
/* Default target word size to pointer size. */
#ifndef TCG_TARGET_REG_BITS
# if UINTPTR_MAX == UINT32_MAX
#  define TCG_TARGET_REG_BITS 32
# elif UINTPTR_MAX == UINT64_MAX
#  define TCG_TARGET_REG_BITS 64
# else
#  error Unknown pointer size for tcg target
# endif
#endif
#if TCG_TARGET_REG_BITS == 32
typedef int32_t tcg_target_long;
typedef uint32_t tcg_target_ulong;
#define TCG_PRIlx PRIx32
#define TCG_PRIld PRId32
#elif TCG_TARGET_REG_BITS == 64
typedef int64_t tcg_target_long;
typedef uint64_t tcg_target_ulong;
#define TCG_PRIlx PRIx64
#define TCG_PRIld PRId64
#else
#error unsupported
#endif

#include "tcg-runtime.h"
#if TCG_TARGET_NB_REGS <= 32
typedef uint32_t TCGRegSet;
#elif TCG_TARGET_NB_REGS <= 64
typedef uint64_t TCGRegSet;
#else
#error unsupported
#endif
#if TCG_TARGET_REG_BITS == 32
/* Turn some undef macros into false macros. */
#define TCG_TARGET_HAS_div_i64          0
#define TCG_TARGET_HAS_rem_i64          0
#define TCG_TARGET_HAS_div2_i64         0
#define TCG_TARGET_HAS_rot_i64          0
#define TCG_TARGET_HAS_ext8s_i64        0
#define TCG_TARGET_HAS_ext16s_i64       0
#define TCG_TARGET_HAS_ext32s_i64       0
#define TCG_TARGET_HAS_ext8u_i64        0
#define TCG_TARGET_HAS_ext16u_i64       0
#define TCG_TARGET_HAS_ext32u_i64       0
#define TCG_TARGET_HAS_bswap16_i64      0
#define TCG_TARGET_HAS_bswap32_i64      0
#define TCG_TARGET_HAS_bswap64_i64      0
#define TCG_TARGET_HAS_neg_i64          0
#define TCG_TARGET_HAS_not_i64          0
#define TCG_TARGET_HAS_andc_i64         0
#define TCG_TARGET_HAS_orc_i64          0
#define TCG_TARGET_HAS_eqv_i64          0
#define TCG_TARGET_HAS_nand_i64         0
#define TCG_TARGET_HAS_nor_i64          0
#define TCG_TARGET_HAS_deposit_i64      0
#define TCG_TARGET_HAS_movcond_i64      0
#define TCG_TARGET_HAS_add2_i64         0
#define TCG_TARGET_HAS_sub2_i64         0
#define TCG_TARGET_HAS_mulu2_i64        0
#define TCG_TARGET_HAS_muls2_i64        0
#define TCG_TARGET_HAS_muluh_i64        0
#define TCG_TARGET_HAS_mulsh_i64        0
/* Turn some undef macros into true macros. */
#define TCG_TARGET_HAS_add2_i32         1
#define TCG_TARGET_HAS_sub2_i32         1
#define TCG_TARGET_HAS_mulu2_i32        1
#endif
#ifndef TCG_TARGET_deposit_i32_valid
#define TCG_TARGET_deposit_i32_valid(ofs, len) 1
#endif
#ifndef TCG_TARGET_deposit_i64_valid
#define TCG_TARGET_deposit_i64_valid(ofs, len) 1
#endif
/* Only one of DIV or DIV2 should be defined. */
#if defined(TCG_TARGET_HAS_div_i32)
#define TCG_TARGET_HAS_div2_i32         0
#elif defined(TCG_TARGET_HAS_div2_i32)
#define TCG_TARGET_HAS_div_i32          0
#define TCG_TARGET_HAS_rem_i32          0
#endif
#if defined(TCG_TARGET_HAS_div_i64)
#define TCG_TARGET_HAS_div2_i64         0
#elif defined(TCG_TARGET_HAS_div_i64)
#define TCG_TARGET_HAS_div_i64          0
#define TCG_TARGET_HAS_rem_i64          0
#endif
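/* Illustrative sketch (not part of this header): a backend that has a
 * native division instruction would advertise one of the two forms from
 * its tcg-target.h, and the block above then forces the other form off:
 *
 *     #define TCG_TARGET_HAS_div_i32  1    // hypothetical backend setting
 *     // => TCG_TARGET_HAS_div2_i32 is defined to 0 here
 */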
typedef enum TCGOpcode {
#define DEF(name, oargs, iargs, cargs, flags) INDEX_op_ ## name,
#include "tcg-opc.h"
#undef DEF
    NB_OPS,
} TCGOpcode;
#define tcg_regset_clear(d) (d) = 0
#define tcg_regset_set(d, s) (d) = (s)
#define tcg_regset_set32(d, reg, val32) (d) |= (val32) << (reg)
#define tcg_regset_set_reg(d, r) (d) |= 1L << (r)
#define tcg_regset_reset_reg(d, r) (d) &= ~(1L << (r))
#define tcg_regset_test_reg(d, r) (((d) >> (r)) & 1)
#define tcg_regset_or(d, a, b) (d) = (a) | (b)
#define tcg_regset_and(d, a, b) (d) = (a) & (b)
#define tcg_regset_andnot(d, a, b) (d) = (a) & ~(b)
#define tcg_regset_not(d, a) (d) = ~(a)
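/* Usage sketch (illustrative only; the register index and count are
 * hypothetical): building a set of allocatable registers by seeding a
 * range and knocking out a reserved one:
 *
 *     TCGRegSet avail;
 *     tcg_regset_clear(avail);
 *     tcg_regset_set32(avail, 0, 0xffff);    // registers 0..15
 *     tcg_regset_reset_reg(avail, 13);       // reserve register 13
 *     if (tcg_regset_test_reg(avail, 3)) { /-* reg 3 is available *-/ }
 */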
typedef struct TCGRelocation {
    struct TCGRelocation *next;
    int type;
    uint8_t *ptr;
    intptr_t addend;
} TCGRelocation;

typedef struct TCGLabel {
    int has_value;
    union {
        uintptr_t value;
        TCGRelocation *first_reloc;
    } u;
} TCGLabel;

typedef struct TCGPool {
    struct TCGPool *next;
    int size;
    uint8_t data[0] __attribute__ ((aligned));
} TCGPool;
#define TCG_POOL_CHUNK_SIZE 32768

#define TCG_MAX_LABELS 512

#define TCG_MAX_TEMPS 512

/* When the size of the arguments of a called function is smaller than
   this value, they are statically allocated in the TB stack frame. */
#define TCG_STATIC_CALL_ARGS_SIZE 128
typedef enum TCGType {
    TCG_TYPE_I32,
    TCG_TYPE_I64,
    TCG_TYPE_COUNT, /* number of different types */

    /* An alias for the size of the host register. */
#if TCG_TARGET_REG_BITS == 32
    TCG_TYPE_REG = TCG_TYPE_I32,
#else
    TCG_TYPE_REG = TCG_TYPE_I64,
#endif

    /* An alias for the size of the native pointer. */
#if UINTPTR_MAX == UINT32_MAX
    TCG_TYPE_PTR = TCG_TYPE_I32,
#else
    TCG_TYPE_PTR = TCG_TYPE_I64,
#endif

    /* An alias for the size of the target "long", aka register. */
#if TARGET_LONG_BITS == 64
    TCG_TYPE_TL = TCG_TYPE_I64,
#else
    TCG_TYPE_TL = TCG_TYPE_I32,
#endif
} TCGType;
/* Constants for qemu_ld and qemu_st for the Memory Operation field. */
typedef enum TCGMemOp {
    MO_8     = 0,
    MO_16    = 1,
    MO_32    = 2,
    MO_64    = 3,
    MO_SIZE  = 3,   /* Mask for the above. */

    MO_SIGN  = 4,   /* Sign-extended, otherwise zero-extended. */

    MO_BSWAP = 8,   /* Host reverse endian. */
#ifdef HOST_WORDS_BIGENDIAN
    MO_LE    = MO_BSWAP,
    MO_BE    = 0,
#else
    MO_LE    = 0,
    MO_BE    = MO_BSWAP,
#endif
#ifdef TARGET_WORDS_BIGENDIAN
    MO_TE    = MO_BE,
#else
    MO_TE    = MO_LE,
#endif

    /* Combinations of the above, for ease of use. */
    MO_UB    = MO_8,
    MO_UW    = MO_16,
    MO_UL    = MO_32,
    MO_SB    = MO_SIGN | MO_8,
    MO_SW    = MO_SIGN | MO_16,
    MO_SL    = MO_SIGN | MO_32,
    MO_Q     = MO_64,

    MO_LEUW  = MO_LE | MO_UW,
    MO_LEUL  = MO_LE | MO_UL,
    MO_LESW  = MO_LE | MO_SW,
    MO_LESL  = MO_LE | MO_SL,
    MO_LEQ   = MO_LE | MO_Q,

    MO_BEUW  = MO_BE | MO_UW,
    MO_BEUL  = MO_BE | MO_UL,
    MO_BESW  = MO_BE | MO_SW,
    MO_BESL  = MO_BE | MO_SL,
    MO_BEQ   = MO_BE | MO_Q,

    MO_TEUW  = MO_TE | MO_UW,
    MO_TEUL  = MO_TE | MO_UL,
    MO_TESW  = MO_TE | MO_SW,
    MO_TESL  = MO_TE | MO_SL,
    MO_TEQ   = MO_TE | MO_Q,

    MO_SSIZE = MO_SIZE | MO_SIGN,
} TCGMemOp;
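/* Worked example (sketch): a target-endian, sign-extended 16-bit load is
 * MO_TESW == MO_TE | MO_SIGN | MO_16. On a little-endian host running a
 * big-endian guest, MO_TE == MO_BE == MO_BSWAP, so the access byte-swaps;
 * when host and guest endianness match, MO_TE == 0 and no swap is done. */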
typedef tcg_target_ulong TCGArg;
/* Define a type and accessor macros for variables. Using a struct is
   nice because it gives some level of type safety. Ideally the compiler
   would be able to see through all this. However in practice this is not
   true, especially on targets with braindamaged ABIs (e.g. i386).
   We use plain int by default to avoid this runtime overhead.
   Users of tcg_gen_* don't need to know about any of this, and should
   treat TCGv as an opaque type.
   In addition we do typechecking for different types of variables. TCGv_i32
   and TCGv_i64 are 32/64-bit variables respectively. TCGv and TCGv_ptr
   are aliases for target_ulong and host pointer sized values respectively. */
#ifdef CONFIG_DEBUG_TCG
#define DEBUG_TCGV 1
#endif

#ifdef DEBUG_TCGV

typedef struct {
    int i32;
} TCGv_i32;

typedef struct {
    int i64;
} TCGv_i64;

typedef struct {
    int iptr;
} TCGv_ptr;

#define MAKE_TCGV_I32(i) __extension__                  \
    ({ TCGv_i32 make_tcgv_tmp = {i}; make_tcgv_tmp;})
#define MAKE_TCGV_I64(i) __extension__                  \
    ({ TCGv_i64 make_tcgv_tmp = {i}; make_tcgv_tmp;})
#define MAKE_TCGV_PTR(i) __extension__                  \
    ({ TCGv_ptr make_tcgv_tmp = {i}; make_tcgv_tmp; })
#define GET_TCGV_I32(t) ((t).i32)
#define GET_TCGV_I64(t) ((t).i64)
#define GET_TCGV_PTR(t) ((t).iptr)

#if TCG_TARGET_REG_BITS == 32
#define TCGV_LOW(t) MAKE_TCGV_I32(GET_TCGV_I64(t))
#define TCGV_HIGH(t) MAKE_TCGV_I32(GET_TCGV_I64(t) + 1)
#endif

#else /* !DEBUG_TCGV */

typedef int TCGv_i32;
typedef int TCGv_i64;
#if TCG_TARGET_REG_BITS == 32
#define TCGv_ptr TCGv_i32
#else
#define TCGv_ptr TCGv_i64
#endif
#define MAKE_TCGV_I32(x) (x)
#define MAKE_TCGV_I64(x) (x)
#define MAKE_TCGV_PTR(x) (x)
#define GET_TCGV_I32(t) (t)
#define GET_TCGV_I64(t) (t)
#define GET_TCGV_PTR(t) (t)

#if TCG_TARGET_REG_BITS == 32
#define TCGV_LOW(t) (t)
#define TCGV_HIGH(t) ((t) + 1)
#endif

#endif /* DEBUG_TCGV */
#define TCGV_EQUAL_I32(a, b) (GET_TCGV_I32(a) == GET_TCGV_I32(b))
#define TCGV_EQUAL_I64(a, b) (GET_TCGV_I64(a) == GET_TCGV_I64(b))

/* Dummy definition to avoid compiler warnings. */
#define TCGV_UNUSED_I32(x) x = MAKE_TCGV_I32(-1)
#define TCGV_UNUSED_I64(x) x = MAKE_TCGV_I64(-1)

#define TCGV_IS_UNUSED_I32(x) (GET_TCGV_I32(x) == -1)
#define TCGV_IS_UNUSED_I64(x) (GET_TCGV_I64(x) == -1)
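/* Usage sketch (illustrative): a translator keeping an optional temp can
 * initialise it as unused and test it later:
 *
 *     TCGv_i32 addend;
 *     TCGV_UNUSED_I32(addend);
 *     ...
 *     if (!TCGV_IS_UNUSED_I32(addend)) { /-* use it *-/ }
 */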
/* call flags */
/* Helper does not read globals (either directly or through an exception). It
   implies TCG_CALL_NO_WRITE_GLOBALS. */
#define TCG_CALL_NO_READ_GLOBALS    0x0010
/* Helper does not write globals. */
#define TCG_CALL_NO_WRITE_GLOBALS   0x0020
/* Helper can be safely suppressed if the return value is not used. */
#define TCG_CALL_NO_SIDE_EFFECTS    0x0040

/* convenience version of most used call flags */
#define TCG_CALL_NO_RWG         TCG_CALL_NO_READ_GLOBALS
#define TCG_CALL_NO_WG          TCG_CALL_NO_WRITE_GLOBALS
#define TCG_CALL_NO_SE          TCG_CALL_NO_SIDE_EFFECTS
#define TCG_CALL_NO_RWG_SE      (TCG_CALL_NO_RWG | TCG_CALL_NO_SE)
#define TCG_CALL_NO_WG_SE       (TCG_CALL_NO_WG | TCG_CALL_NO_SE)

/* used to align parameters */
#define TCG_CALL_DUMMY_TCGV     MAKE_TCGV_I32(-1)
#define TCG_CALL_DUMMY_ARG      ((TCGArg)(-1))
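/* Sketch (hypothetical helper name; DEF_HELPER_FLAGS_* is QEMU's helper
 * declaration macro): a pure helper that only reads its arguments can be
 * marked both global-safe and dead-code-removable:
 *
 *     DEF_HELPER_FLAGS_2(clz32, TCG_CALL_NO_RWG_SE, i32, i32, i32)
 */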
/* Conditions. Note that these are laid out for easy manipulation by
   the functions below:
     bit 0 is used for inverting;
     bit 1 is signed,
     bit 2 is unsigned,
     bit 3 is used with bit 0 for swapping signed/unsigned. */
typedef enum {
    /* non-signed */
    TCG_COND_NEVER  = 0 | 0 | 0 | 0,
    TCG_COND_ALWAYS = 0 | 0 | 0 | 1,
    TCG_COND_EQ     = 8 | 0 | 0 | 0,
    TCG_COND_NE     = 8 | 0 | 0 | 1,
    /* signed */
    TCG_COND_LT     = 0 | 0 | 2 | 0,
    TCG_COND_GE     = 0 | 0 | 2 | 1,
    TCG_COND_LE     = 8 | 0 | 2 | 0,
    TCG_COND_GT     = 8 | 0 | 2 | 1,
    /* unsigned */
    TCG_COND_LTU    = 0 | 4 | 0 | 0,
    TCG_COND_GEU    = 0 | 4 | 0 | 1,
    TCG_COND_LEU    = 8 | 4 | 0 | 0,
    TCG_COND_GTU    = 8 | 4 | 0 | 1,
} TCGCond;
/* Invert the sense of the comparison. */
static inline TCGCond tcg_invert_cond(TCGCond c)
{
    return (TCGCond)(c ^ 1);
}

/* Swap the operands in a comparison. */
static inline TCGCond tcg_swap_cond(TCGCond c)
{
    return c & 6 ? (TCGCond)(c ^ 9) : c;
}

/* Create an "unsigned" version of a "signed" comparison. */
static inline TCGCond tcg_unsigned_cond(TCGCond c)
{
    return c & 2 ? (TCGCond)(c ^ 6) : c;
}

/* Must a comparison be considered unsigned? */
static inline bool is_unsigned_cond(TCGCond c)
{
    return (c & 4) != 0;
}

/* Create a "high" version of a double-word comparison.
   This removes equality from a LTE or GTE comparison. */
static inline TCGCond tcg_high_cond(TCGCond c)
{
    switch (c) {
    case TCG_COND_GE:
    case TCG_COND_LE:
    case TCG_COND_GEU:
    case TCG_COND_LEU:
        return (TCGCond)(c ^ 8);
    default:
        return c;
    }
}
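/* Worked example (sketch): TCG_COND_LT is 0|0|2|0 == 2. Flipping bit 0
 * gives 3 == TCG_COND_GE (tcg_invert_cond); XOR-ing with 6 gives
 * 4 == TCG_COND_LTU (tcg_unsigned_cond); XOR-ing with 9 gives
 * 11 == TCG_COND_GT, the comparison with its operands swapped,
 * since a < b iff b > a (tcg_swap_cond). */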
#define TEMP_VAL_DEAD  0
#define TEMP_VAL_REG   1
#define TEMP_VAL_MEM   2
#define TEMP_VAL_CONST 3
/* XXX: optimize memory layout */
typedef struct TCGTemp {
    TCGType base_type;
    TCGType type;
    int val_type;
    int reg;
    tcg_target_long val;
    int mem_reg;
    intptr_t mem_offset;
    unsigned int fixed_reg:1;
    unsigned int mem_coherent:1;
    unsigned int mem_allocated:1;
    unsigned int temp_local:1; /* If true, the temp is saved across
                                  basic blocks. Otherwise, it is not
                                  preserved across basic blocks. */
    unsigned int temp_allocated:1; /* never used for code gen */
    const char *name;
} TCGTemp;
typedef struct TCGContext TCGContext;

typedef struct TCGTempSet {
    unsigned long l[BITS_TO_LONGS(TCG_MAX_TEMPS)];
} TCGTempSet;
struct TCGContext {
    uint8_t *pool_cur, *pool_end;
    TCGPool *pool_first, *pool_current, *pool_first_large;
    TCGLabel *labels;
    int nb_labels;
    int nb_globals;
    int nb_temps;

    /* goto_tb support */
    uint8_t *code_buf;
    uintptr_t *tb_next;
    uint16_t *tb_next_offset;
    uint16_t *tb_jmp_offset; /* != NULL if USE_DIRECT_JUMP */

    /* liveness analysis */
    uint16_t *op_dead_args; /* for each operation, each bit tells if the
                               corresponding argument is dead */
    uint8_t *op_sync_args;  /* for each operation, each bit tells if the
                               corresponding output argument needs to be
                               synced to memory. */

    /* tells in which temporary a given register is. It does not take
       into account fixed registers */
    int reg_to_temp[TCG_TARGET_NB_REGS];
    TCGRegSet reserved_regs;
    intptr_t current_frame_offset;
    intptr_t frame_start;
    intptr_t frame_end;
    int frame_reg;

    uint8_t *code_ptr;
    TCGTemp temps[TCG_MAX_TEMPS]; /* globals first, temps after */
    TCGTempSet free_temps[TCG_TYPE_COUNT * 2];

    GHashTable *helpers;

#ifdef CONFIG_PROFILER
    /* profiling info */
    int64_t tb_count1;
    int64_t tb_count;
    int64_t op_count; /* total insn count */
    int op_count_max; /* max insn per TB */
    int64_t temp_count;
    int temp_count_max;
    int64_t del_op_count;
    int64_t code_in_len;
    int64_t code_out_len;
    int64_t interm_time;
    int64_t code_time;
    int64_t la_time;
    int64_t opt_time;
    int64_t restore_count;
    int64_t restore_time;
#endif

#ifdef CONFIG_DEBUG_TCG
    int temps_in_use;
    int goto_tb_issue_mask;
#endif

    uint16_t gen_opc_buf[OPC_BUF_SIZE];
    TCGArg gen_opparam_buf[OPPARAM_BUF_SIZE];

    uint16_t *gen_opc_ptr;
    TCGArg *gen_opparam_ptr;
    target_ulong gen_opc_pc[OPC_BUF_SIZE];
    uint16_t gen_opc_icount[OPC_BUF_SIZE];
    uint8_t gen_opc_instr_start[OPC_BUF_SIZE];

    /* Code generation */
    int code_gen_max_blocks;
    uint8_t *code_gen_prologue;
    uint8_t *code_gen_buffer;
    size_t code_gen_buffer_size;
    /* threshold to flush the translated code buffer */
    size_t code_gen_buffer_max_size;
    uint8_t *code_gen_ptr;

    TBContext tb_ctx;

    /* The TCGBackendData structure is private to tcg-target.c. */
    struct TCGBackendData *be;
};

extern TCGContext tcg_ctx;
/* pool based memory allocation */
void *tcg_malloc_internal(TCGContext *s, int size);
void tcg_pool_reset(TCGContext *s);
void tcg_pool_delete(TCGContext *s);

static inline void *tcg_malloc(int size)
{
    TCGContext *s = &tcg_ctx;
    uint8_t *ptr, *ptr_end;
    /* Round the request up to the pool's long-sized alignment granule. */
    size = (size + sizeof(long) - 1) & ~(sizeof(long) - 1);
    ptr = s->pool_cur;
    ptr_end = ptr + size;
    if (unlikely(ptr_end > s->pool_end)) {
        return tcg_malloc_internal(&tcg_ctx, size);
    } else {
        s->pool_cur = ptr_end;
        return ptr;
    }
}
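/* Usage sketch (illustrative): pool allocations are released in bulk by
 * tcg_pool_reset() at the start of each translation, so callers never
 * free individual blocks:
 *
 *     TCGRelocation *r = tcg_malloc(sizeof(TCGRelocation));
 *     r->next = l->u.first_reloc;    // 'l' is a hypothetical TCGLabel *
 */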
void tcg_context_init(TCGContext *s);
void tcg_prologue_init(TCGContext *s);
void tcg_func_start(TCGContext *s);

int tcg_gen_code(TCGContext *s, uint8_t *gen_code_buf);
int tcg_gen_code_search_pc(TCGContext *s, uint8_t *gen_code_buf, long offset);

void tcg_set_frame(TCGContext *s, int reg, intptr_t start, intptr_t size);
TCGv_i32 tcg_global_reg_new_i32(int reg, const char *name);
TCGv_i32 tcg_global_mem_new_i32(int reg, intptr_t offset, const char *name);
TCGv_i32 tcg_temp_new_internal_i32(int temp_local);
static inline TCGv_i32 tcg_temp_new_i32(void)
{
    return tcg_temp_new_internal_i32(0);
}
static inline TCGv_i32 tcg_temp_local_new_i32(void)
{
    return tcg_temp_new_internal_i32(1);
}
void tcg_temp_free_i32(TCGv_i32 arg);
char *tcg_get_arg_str_i32(TCGContext *s, char *buf, int buf_size, TCGv_i32 arg);

TCGv_i64 tcg_global_reg_new_i64(int reg, const char *name);
TCGv_i64 tcg_global_mem_new_i64(int reg, intptr_t offset, const char *name);
TCGv_i64 tcg_temp_new_internal_i64(int temp_local);
static inline TCGv_i64 tcg_temp_new_i64(void)
{
    return tcg_temp_new_internal_i64(0);
}
static inline TCGv_i64 tcg_temp_local_new_i64(void)
{
    return tcg_temp_new_internal_i64(1);
}
void tcg_temp_free_i64(TCGv_i64 arg);
char *tcg_get_arg_str_i64(TCGContext *s, char *buf, int buf_size, TCGv_i64 arg);
#if defined(CONFIG_DEBUG_TCG)
/* If you call tcg_clear_temp_count() at the start of a section of
 * code which is not supposed to leak any TCG temporaries, then
 * calling tcg_check_temp_count() at the end of the section will
 * return 1 if the section did in fact leak a temporary.
 */
void tcg_clear_temp_count(void);
int tcg_check_temp_count(void);
#else
#define tcg_clear_temp_count() do { } while (0)
#define tcg_check_temp_count() 0
#endif
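/* Usage sketch (illustrative; the translation call is hypothetical):
 *
 *     tcg_clear_temp_count();
 *     translate_one_insn(env);                 // hypothetical front end
 *     if (tcg_check_temp_count()) {
 *         fprintf(stderr, "TCG temporary leak in translator\n");
 *     }
 */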
void tcg_dump_info(FILE *f, fprintf_function cpu_fprintf);
#define TCG_CT_ALIAS  0x80
#define TCG_CT_IALIAS 0x40
#define TCG_CT_REG    0x01
#define TCG_CT_CONST  0x02 /* any constant of register size */

typedef struct TCGArgConstraint {
    uint16_t ct;
    uint8_t alias_index;
    union {
        TCGRegSet regs;
    } u;
} TCGArgConstraint;
#define TCG_MAX_OP_ARGS 16

/* Bits for TCGOpDef->flags, 8 bits available. */
enum {
    /* Instruction defines the end of a basic block. */
    TCG_OPF_BB_END       = 0x01,
    /* Instruction clobbers call registers and potentially updates globals. */
    TCG_OPF_CALL_CLOBBER = 0x02,
    /* Instruction has side effects: it cannot be removed if its outputs
       are not used, and might trigger exceptions. */
    TCG_OPF_SIDE_EFFECTS = 0x04,
    /* Instruction operands are 64-bit (otherwise 32-bit). */
    TCG_OPF_64BIT        = 0x08,
    /* Instruction is optional and not implemented by the host, or insn
       is generic and should not be implemented by the host. */
    TCG_OPF_NOT_PRESENT  = 0x10,
};
typedef struct TCGOpDef {
    const char *name;
    uint8_t nb_oargs, nb_iargs, nb_cargs, nb_args;
    uint8_t flags;
    TCGArgConstraint *args_ct;
    int *sorted_args;
#if defined(CONFIG_DEBUG_TCG)
    int used;
#endif
} TCGOpDef;

extern TCGOpDef tcg_op_defs[];
extern const size_t tcg_op_defs_max;
typedef struct TCGTargetOpDef {
    TCGOpcode op;
    const char *args_ct_str[TCG_MAX_OP_ARGS];
} TCGTargetOpDef;
#define tcg_abort() \
do {\
    fprintf(stderr, "%s:%d: tcg fatal error\n", __FILE__, __LINE__);\
    abort();\
} while (0)
#ifdef CONFIG_DEBUG_TCG
# define tcg_debug_assert(X) do { assert(X); } while (0)
#elif QEMU_GNUC_PREREQ(4, 5)
# define tcg_debug_assert(X) \
    do { if (!(X)) { __builtin_unreachable(); } } while (0)
#else
# define tcg_debug_assert(X) do { (void)(X); } while (0)
#endif
void tcg_add_target_add_op_defs(const TCGTargetOpDef *tdefs);
#if UINTPTR_MAX == UINT32_MAX
#define TCGV_NAT_TO_PTR(n) MAKE_TCGV_PTR(GET_TCGV_I32(n))
#define TCGV_PTR_TO_NAT(n) MAKE_TCGV_I32(GET_TCGV_PTR(n))

#define tcg_const_ptr(V) TCGV_NAT_TO_PTR(tcg_const_i32((intptr_t)(V)))
#define tcg_global_reg_new_ptr(R, N) \
    TCGV_NAT_TO_PTR(tcg_global_reg_new_i32((R), (N)))
#define tcg_global_mem_new_ptr(R, O, N) \
    TCGV_NAT_TO_PTR(tcg_global_mem_new_i32((R), (O), (N)))
#define tcg_temp_new_ptr() TCGV_NAT_TO_PTR(tcg_temp_new_i32())
#define tcg_temp_free_ptr(T) tcg_temp_free_i32(TCGV_PTR_TO_NAT(T))
#else
#define TCGV_NAT_TO_PTR(n) MAKE_TCGV_PTR(GET_TCGV_I64(n))
#define TCGV_PTR_TO_NAT(n) MAKE_TCGV_I64(GET_TCGV_PTR(n))

#define tcg_const_ptr(V) TCGV_NAT_TO_PTR(tcg_const_i64((intptr_t)(V)))
#define tcg_global_reg_new_ptr(R, N) \
    TCGV_NAT_TO_PTR(tcg_global_reg_new_i64((R), (N)))
#define tcg_global_mem_new_ptr(R, O, N) \
    TCGV_NAT_TO_PTR(tcg_global_mem_new_i64((R), (O), (N)))
#define tcg_temp_new_ptr() TCGV_NAT_TO_PTR(tcg_temp_new_i64())
#define tcg_temp_free_ptr(T) tcg_temp_free_i64(TCGV_PTR_TO_NAT(T))
#endif
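/* Usage sketch (illustrative): TCGv_ptr lets front ends manipulate host
 * pointers without caring whether the host word size is 32 or 64 bits:
 *
 *     TCGv_ptr p = tcg_temp_new_ptr();
 *     ...
 *     tcg_temp_free_ptr(p);
 */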
void tcg_gen_callN(TCGContext *s, TCGv_ptr func, unsigned int flags,
                   int sizemask, TCGArg ret, int nargs, TCGArg *args);

void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                        int c, int right, int arith);

TCGArg *tcg_optimize(TCGContext *s, uint16_t *tcg_opc_ptr, TCGArg *args,
                     TCGOpDef *tcg_op_def);
/* only used for debugging purposes */
void tcg_dump_ops(TCGContext *s);

void dump_ops(const uint16_t *opc_buf, const TCGArg *opparam_buf);
TCGv_i32 tcg_const_i32(int32_t val);
TCGv_i64 tcg_const_i64(int64_t val);
TCGv_i32 tcg_const_local_i32(int32_t val);
TCGv_i64 tcg_const_local_i64(int64_t val);
/**
 * tcg_qemu_tb_exec:
 * @env: CPUArchState * for the CPU
 * @tb_ptr: address of generated code for the TB to execute
 *
 * Start executing code from a given translation block.
 * Where translation blocks have been linked, execution
 * may proceed from the given TB into successive ones.
 * Control eventually returns only when some action is needed
 * from the top-level loop: either control must pass to a TB
 * which has not yet been directly linked, or an asynchronous
 * event such as an interrupt needs handling.
 *
 * The return value is a pointer to the next TB to execute
 * (if known; otherwise zero). This pointer is assumed to be
 * 4-aligned, and the bottom two bits are used to return further
 * information:
 *  0, 1: the link between this TB and the next is via the specified
 *        TB index (0 or 1). That is, we left the TB via (the equivalent
 *        of) "goto_tb <index>". The main loop uses this to determine
 *        how to link the TB just executed to the next.
 *  2:    we are using instruction counting code generation, and we
 *        did not start executing this TB because the instruction counter
 *        would hit zero midway through it. In this case the next-TB pointer
 *        returned is the TB we were about to execute, and the caller must
 *        arrange to execute the remaining count of instructions.
 *  3:    we stopped because the CPU's exit_request flag was set
 *        (usually meaning that there is an interrupt that needs to be
 *        handled). The next-TB pointer returned is the TB we were
 *        about to execute when we noticed the pending exit request.
 *
 * If the bottom two bits indicate an exit-via-index then the CPU
 * state is correctly synchronised and ready for execution of the next
 * TB (and in particular the guest PC is the address to execute next).
 * Otherwise, we gave up on execution of this TB before it started, and
 * the caller must fix up the CPU state by calling cpu_pc_from_tb()
 * with the next-TB pointer we return.
 *
 * Note that TCG targets may use a different definition of tcg_qemu_tb_exec
 * to this default (which just calls the prologue.code emitted by
 * tcg_target_qemu_prologue()).
 */
#define TB_EXIT_MASK 3
#define TB_EXIT_IDX0 0
#define TB_EXIT_IDX1 1
#define TB_EXIT_ICOUNT_EXPIRED 2
#define TB_EXIT_REQUESTED 3
#if !defined(tcg_qemu_tb_exec)
# define tcg_qemu_tb_exec(env, tb_ptr) \
    ((uintptr_t (*)(void *, void *))tcg_ctx.code_gen_prologue)(env, tb_ptr)
#endif
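/* Usage sketch (illustrative, simplified from a typical top-level loop):
 *
 *     uintptr_t ret = tcg_qemu_tb_exec(env, tb->tc_ptr);
 *     TranslationBlock *t = (TranslationBlock *)(ret & ~TB_EXIT_MASK);
 *     switch (ret & TB_EXIT_MASK) {
 *     case TB_EXIT_IDX0:
 *     case TB_EXIT_IDX1:
 *         // exited via goto_tb: patch that jump slot of 't' once the
 *         // destination TB is known
 *         break;
 *     case TB_EXIT_ICOUNT_EXPIRED:
 *     case TB_EXIT_REQUESTED:
 *         // 't' never started executing; synchronise CPU state from it
 *         break;
 *     }
 */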
void tcg_register_jit(void *buf, size_t buf_size);
/*
 * Memory helpers that will be used by TCG generated code.
 */
#ifdef CONFIG_SOFTMMU
/* Value zero-extended to tcg register size. */
tcg_target_ulong helper_ret_ldub_mmu(CPUArchState *env, target_ulong addr,
                                     int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_le_lduw_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_le_ldul_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
uint64_t helper_le_ldq_mmu(CPUArchState *env, target_ulong addr,
                           int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_be_lduw_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_be_ldul_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
uint64_t helper_be_ldq_mmu(CPUArchState *env, target_ulong addr,
                           int mmu_idx, uintptr_t retaddr);

/* Value sign-extended to tcg register size. */
tcg_target_ulong helper_ret_ldsb_mmu(CPUArchState *env, target_ulong addr,
                                     int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_le_ldsw_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_le_ldsl_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_be_ldsw_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_be_ldsl_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);

void helper_ret_stb_mmu(CPUArchState *env, target_ulong addr, uint8_t val,
                        int mmu_idx, uintptr_t retaddr);
void helper_le_stw_mmu(CPUArchState *env, target_ulong addr, uint16_t val,
                       int mmu_idx, uintptr_t retaddr);
void helper_le_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
                       int mmu_idx, uintptr_t retaddr);
void helper_le_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
                       int mmu_idx, uintptr_t retaddr);
void helper_be_stw_mmu(CPUArchState *env, target_ulong addr, uint16_t val,
                       int mmu_idx, uintptr_t retaddr);
void helper_be_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
                       int mmu_idx, uintptr_t retaddr);
void helper_be_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
                       int mmu_idx, uintptr_t retaddr);
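/* Note (sketch): the helper naming mirrors TCGMemOp, so a backend emitting
 * the slow path for a MO_LEUL load calls helper_le_ldul_mmu(), and for a
 * MO_BEQ store calls helper_be_stq_mmu(); single-byte accesses need no
 * endianness and appear once, with the "ret" prefix. */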
/* Temporary aliases until backends are converted. */
#ifdef TARGET_WORDS_BIGENDIAN
# define helper_ret_ldsw_mmu  helper_be_ldsw_mmu
# define helper_ret_lduw_mmu  helper_be_lduw_mmu
# define helper_ret_ldsl_mmu  helper_be_ldsl_mmu
# define helper_ret_ldul_mmu  helper_be_ldul_mmu
# define helper_ret_ldq_mmu   helper_be_ldq_mmu
# define helper_ret_stw_mmu   helper_be_stw_mmu
# define helper_ret_stl_mmu   helper_be_stl_mmu
# define helper_ret_stq_mmu   helper_be_stq_mmu
#else
# define helper_ret_ldsw_mmu  helper_le_ldsw_mmu
# define helper_ret_lduw_mmu  helper_le_lduw_mmu
# define helper_ret_ldsl_mmu  helper_le_ldsl_mmu
# define helper_ret_ldul_mmu  helper_le_ldul_mmu
# define helper_ret_ldq_mmu   helper_le_ldq_mmu
# define helper_ret_stw_mmu   helper_le_stw_mmu
# define helper_ret_stl_mmu   helper_le_stl_mmu
# define helper_ret_stq_mmu   helper_le_stq_mmu
#endif
uint8_t helper_ldb_mmu(CPUArchState *env, target_ulong addr, int mmu_idx);
uint16_t helper_ldw_mmu(CPUArchState *env, target_ulong addr, int mmu_idx);
uint32_t helper_ldl_mmu(CPUArchState *env, target_ulong addr, int mmu_idx);
uint64_t helper_ldq_mmu(CPUArchState *env, target_ulong addr, int mmu_idx);

void helper_stb_mmu(CPUArchState *env, target_ulong addr,
                    uint8_t val, int mmu_idx);
void helper_stw_mmu(CPUArchState *env, target_ulong addr,
                    uint16_t val, int mmu_idx);
void helper_stl_mmu(CPUArchState *env, target_ulong addr,
                    uint32_t val, int mmu_idx);
void helper_stq_mmu(CPUArchState *env, target_ulong addr,
                    uint64_t val, int mmu_idx);

#endif /* CONFIG_SOFTMMU */