/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "qemu-common.h"
#include "qemu/bitops.h"
#include "tcg-target.h"

/* Default target word size to pointer size. */
#ifndef TCG_TARGET_REG_BITS
# if UINTPTR_MAX == UINT32_MAX
#  define TCG_TARGET_REG_BITS 32
# elif UINTPTR_MAX == UINT64_MAX
#  define TCG_TARGET_REG_BITS 64
# else
#  error Unknown pointer size for tcg target
# endif
#endif
#if TCG_TARGET_REG_BITS == 32
typedef int32_t tcg_target_long;
typedef uint32_t tcg_target_ulong;
#define TCG_PRIlx PRIx32
#define TCG_PRIld PRId32
#elif TCG_TARGET_REG_BITS == 64
typedef int64_t tcg_target_long;
typedef uint64_t tcg_target_ulong;
#define TCG_PRIlx PRIx64
#define TCG_PRIld PRId64
#else
#error unsupported
#endif
#include "tcg-runtime.h"
#if TCG_TARGET_NB_REGS <= 32
typedef uint32_t TCGRegSet;
#elif TCG_TARGET_NB_REGS <= 64
typedef uint64_t TCGRegSet;
#else
#error unsupported
#endif
#if TCG_TARGET_REG_BITS == 32
/* Turn some undef macros into false macros. */
#define TCG_TARGET_HAS_div_i64          0
#define TCG_TARGET_HAS_rem_i64          0
#define TCG_TARGET_HAS_div2_i64         0
#define TCG_TARGET_HAS_rot_i64          0
#define TCG_TARGET_HAS_ext8s_i64        0
#define TCG_TARGET_HAS_ext16s_i64       0
#define TCG_TARGET_HAS_ext32s_i64       0
#define TCG_TARGET_HAS_ext8u_i64        0
#define TCG_TARGET_HAS_ext16u_i64       0
#define TCG_TARGET_HAS_ext32u_i64       0
#define TCG_TARGET_HAS_bswap16_i64      0
#define TCG_TARGET_HAS_bswap32_i64      0
#define TCG_TARGET_HAS_bswap64_i64      0
#define TCG_TARGET_HAS_neg_i64          0
#define TCG_TARGET_HAS_not_i64          0
#define TCG_TARGET_HAS_andc_i64         0
#define TCG_TARGET_HAS_orc_i64          0
#define TCG_TARGET_HAS_eqv_i64          0
#define TCG_TARGET_HAS_nand_i64         0
#define TCG_TARGET_HAS_nor_i64          0
#define TCG_TARGET_HAS_deposit_i64      0
#define TCG_TARGET_HAS_movcond_i64      0
#define TCG_TARGET_HAS_add2_i64         0
#define TCG_TARGET_HAS_sub2_i64         0
#define TCG_TARGET_HAS_mulu2_i64        0
#define TCG_TARGET_HAS_muls2_i64        0
#define TCG_TARGET_HAS_muluh_i64        0
#define TCG_TARGET_HAS_mulsh_i64        0
/* Turn some undef macros into true macros. */
#define TCG_TARGET_HAS_add2_i32         1
#define TCG_TARGET_HAS_sub2_i32         1
#define TCG_TARGET_HAS_mulu2_i32        1
#endif

#ifndef TCG_TARGET_deposit_i32_valid
#define TCG_TARGET_deposit_i32_valid(ofs, len) 1
#endif
#ifndef TCG_TARGET_deposit_i64_valid
#define TCG_TARGET_deposit_i64_valid(ofs, len) 1
#endif

/* Only one of DIV or DIV2 should be defined. */
#if defined(TCG_TARGET_HAS_div_i32)
#define TCG_TARGET_HAS_div2_i32         0
#elif defined(TCG_TARGET_HAS_div2_i32)
#define TCG_TARGET_HAS_div_i32          0
#define TCG_TARGET_HAS_rem_i32          0
#endif
#if defined(TCG_TARGET_HAS_div_i64)
#define TCG_TARGET_HAS_div2_i64         0
#elif defined(TCG_TARGET_HAS_div2_i64)
#define TCG_TARGET_HAS_div_i64          0
#define TCG_TARGET_HAS_rem_i64          0
#endif
typedef enum TCGOpcode {
#define DEF(name, oargs, iargs, cargs, flags) INDEX_op_ ## name,
#include "tcg-opc.h"
#undef DEF
    NB_OPS,
} TCGOpcode;
#define tcg_regset_clear(d) (d) = 0
#define tcg_regset_set(d, s) (d) = (s)
#define tcg_regset_set32(d, reg, val32) (d) |= (val32) << (reg)
#define tcg_regset_set_reg(d, r) (d) |= 1L << (r)
#define tcg_regset_reset_reg(d, r) (d) &= ~(1L << (r))
#define tcg_regset_test_reg(d, r) (((d) >> (r)) & 1)
#define tcg_regset_or(d, a, b) (d) = (a) | (b)
#define tcg_regset_and(d, a, b) (d) = (a) & (b)
#define tcg_regset_andnot(d, a, b) (d) = (a) & ~(b)
#define tcg_regset_not(d, a) (d) = ~(a)
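
/* Illustrative sketch (editor's addition, not part of the original header):
 * typical use of the register-set macros above.  The register indexes 0 and 3
 * are hypothetical placeholders; real code uses the target's TCG_REG_*
 * enumerators.
 *
 *     TCGRegSet set;
 *     tcg_regset_clear(set);              // set = 0
 *     tcg_regset_set_reg(set, 0);         // set |= 1 << 0
 *     tcg_regset_set_reg(set, 3);         // set |= 1 << 3
 *     if (tcg_regset_test_reg(set, 3)) {  // true
 *         tcg_regset_reset_reg(set, 3);   // clear bit 3 again
 *     }
 */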
typedef struct TCGRelocation {
    struct TCGRelocation *next;
    int type;
    uint8_t *ptr;
    intptr_t addend;
} TCGRelocation;

typedef struct TCGLabel {
    int has_value;
    union {
        uintptr_t value;
        TCGRelocation *first_reloc;
    } u;
} TCGLabel;

typedef struct TCGPool {
    struct TCGPool *next;
    int size;
    uint8_t data[0] __attribute__ ((aligned));
} TCGPool;
#define TCG_POOL_CHUNK_SIZE 32768

#define TCG_MAX_LABELS 512

#define TCG_MAX_TEMPS 512

/* when the size of the arguments of a called function is smaller than
   this value, they are statically allocated in the TB stack frame */
#define TCG_STATIC_CALL_ARGS_SIZE 128
typedef enum TCGType {
    TCG_TYPE_I32,
    TCG_TYPE_I64,
    TCG_TYPE_COUNT, /* number of different types */

    /* An alias for the size of the host register. */
#if TCG_TARGET_REG_BITS == 32
    TCG_TYPE_REG = TCG_TYPE_I32,
#else
    TCG_TYPE_REG = TCG_TYPE_I64,
#endif

    /* An alias for the size of the native pointer. */
#if UINTPTR_MAX == UINT32_MAX
    TCG_TYPE_PTR = TCG_TYPE_I32,
#else
    TCG_TYPE_PTR = TCG_TYPE_I64,
#endif

    /* An alias for the size of the target "long", aka register. */
#if TARGET_LONG_BITS == 64
    TCG_TYPE_TL = TCG_TYPE_I64,
#else
    TCG_TYPE_TL = TCG_TYPE_I32,
#endif
} TCGType;
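
/* Worked example of the aliases above (editor's sketch): on a 64-bit host
 * (TCG_TARGET_REG_BITS == 64, UINTPTR_MAX == UINT64_MAX) emulating a guest
 * whose TARGET_LONG_BITS is 32, the aliases resolve to
 *     TCG_TYPE_REG == TCG_TYPE_I64
 *     TCG_TYPE_PTR == TCG_TYPE_I64
 *     TCG_TYPE_TL  == TCG_TYPE_I32
 * so host-register-sized and guest-long-sized values end up as distinct types.
 */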
/* Constants for qemu_ld and qemu_st for the Memory Operation field. */
typedef enum TCGMemOp {
    MO_8     = 0,
    MO_16    = 1,
    MO_32    = 2,
    MO_64    = 3,
    MO_SIZE  = 3,   /* Mask for the above. */

    MO_SIGN  = 4,   /* Sign-extended, otherwise zero-extended. */

    MO_BSWAP = 8,   /* Host reverse endian. */
#ifdef HOST_WORDS_BIGENDIAN
    MO_LE    = MO_BSWAP,
    MO_BE    = 0,
#else
    MO_LE    = 0,
    MO_BE    = MO_BSWAP,
#endif
#ifdef TARGET_WORDS_BIGENDIAN
    MO_TE    = MO_BE,
#else
    MO_TE    = MO_LE,
#endif

    /* Combinations of the above, for ease of use. */
    MO_UB    = MO_8,
    MO_UW    = MO_16,
    MO_UL    = MO_32,
    MO_SB    = MO_SIGN | MO_8,
    MO_SW    = MO_SIGN | MO_16,
    MO_SL    = MO_SIGN | MO_32,
    MO_Q     = MO_64,

    MO_LEUW  = MO_LE | MO_UW,
    MO_LEUL  = MO_LE | MO_UL,
    MO_LESW  = MO_LE | MO_SW,
    MO_LESL  = MO_LE | MO_SL,
    MO_LEQ   = MO_LE | MO_Q,

    MO_BEUW  = MO_BE | MO_UW,
    MO_BEUL  = MO_BE | MO_UL,
    MO_BESW  = MO_BE | MO_SW,
    MO_BESL  = MO_BE | MO_SL,
    MO_BEQ   = MO_BE | MO_Q,

    MO_TEUW  = MO_TE | MO_UW,
    MO_TEUL  = MO_TE | MO_UL,
    MO_TESW  = MO_TE | MO_SW,
    MO_TESL  = MO_TE | MO_SL,
    MO_TEQ   = MO_TE | MO_Q,

    MO_SSIZE = MO_SIZE | MO_SIGN,
} TCGMemOp;
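
/* Worked example (editor's sketch): decomposing one of the combined values.
 * On a little-endian host emulating a little-endian guest, MO_TESW is
 * MO_TE | MO_SW == MO_LE | MO_SIGN | MO_16, so
 *     op & MO_SIZE  == MO_16   (16-bit access)
 *     op & MO_SIGN  != 0       (sign-extend the result)
 *     op & MO_BSWAP == 0       (no host byte swap needed)
 * On a big-endian host the same MO_TESW carries MO_BSWAP instead.
 */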
typedef tcg_target_ulong TCGArg;
/* Define a type and accessor macros for variables. Using a struct is
   nice because it gives some level of type safety. Ideally the compiler
   would be able to see through all this. However, in practice this is not
   true, especially on targets with braindamaged ABIs (e.g. i386).
   We use plain int by default to avoid this runtime overhead.
   Users of tcg_gen_* don't need to know about any of this, and should
   treat TCGv as an opaque type.
   In addition we do typechecking for different types of variables. TCGv_i32
   and TCGv_i64 are 32/64-bit variables respectively. TCGv and TCGv_ptr
   are aliases for target_ulong and host pointer sized values respectively. */
#ifdef CONFIG_DEBUG_TCG
#define DEBUG_TCGV 1
#endif

#ifdef DEBUG_TCGV

typedef struct { int i32; } TCGv_i32;
typedef struct { int i64; } TCGv_i64;
typedef struct { int iptr; } TCGv_ptr;

#define MAKE_TCGV_I32(i) __extension__ \
    ({ TCGv_i32 make_tcgv_tmp = {i}; make_tcgv_tmp; })
#define MAKE_TCGV_I64(i) __extension__ \
    ({ TCGv_i64 make_tcgv_tmp = {i}; make_tcgv_tmp; })
#define MAKE_TCGV_PTR(i) __extension__ \
    ({ TCGv_ptr make_tcgv_tmp = {i}; make_tcgv_tmp; })
#define GET_TCGV_I32(t) ((t).i32)
#define GET_TCGV_I64(t) ((t).i64)
#define GET_TCGV_PTR(t) ((t).iptr)
#if TCG_TARGET_REG_BITS == 32
#define TCGV_LOW(t) MAKE_TCGV_I32(GET_TCGV_I64(t))
#define TCGV_HIGH(t) MAKE_TCGV_I32(GET_TCGV_I64(t) + 1)
#endif
#else /* !DEBUG_TCGV */

typedef int TCGv_i32;
typedef int TCGv_i64;
#if TCG_TARGET_REG_BITS == 32
#define TCGv_ptr TCGv_i32
#else
#define TCGv_ptr TCGv_i64
#endif
#define MAKE_TCGV_I32(x) (x)
#define MAKE_TCGV_I64(x) (x)
#define MAKE_TCGV_PTR(x) (x)
#define GET_TCGV_I32(t) (t)
#define GET_TCGV_I64(t) (t)
#define GET_TCGV_PTR(t) (t)
#if TCG_TARGET_REG_BITS == 32
#define TCGV_LOW(t) (t)
#define TCGV_HIGH(t) ((t) + 1)
#endif

#endif /* DEBUG_TCGV */
#define TCGV_EQUAL_I32(a, b) (GET_TCGV_I32(a) == GET_TCGV_I32(b))
#define TCGV_EQUAL_I64(a, b) (GET_TCGV_I64(a) == GET_TCGV_I64(b))
#define TCGV_EQUAL_PTR(a, b) (GET_TCGV_PTR(a) == GET_TCGV_PTR(b))

/* Dummy definition to avoid compiler warnings. */
#define TCGV_UNUSED_I32(x) x = MAKE_TCGV_I32(-1)
#define TCGV_UNUSED_I64(x) x = MAKE_TCGV_I64(-1)
#define TCGV_UNUSED_PTR(x) x = MAKE_TCGV_PTR(-1)

#define TCGV_IS_UNUSED_I32(x) (GET_TCGV_I32(x) == -1)
#define TCGV_IS_UNUSED_I64(x) (GET_TCGV_I64(x) == -1)
#define TCGV_IS_UNUSED_PTR(x) (GET_TCGV_PTR(x) == -1)
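
/* Illustrative sketch (editor's addition): how a front end typically uses the
 * "unused" markers above.  The variable and flag names are hypothetical.
 *
 *     TCGv_i64 addend;
 *     TCGV_UNUSED_I64(addend);             // mark as not-yet-assigned
 *     if (insn_has_addend) {
 *         addend = tcg_temp_new_i64();
 *     }
 *     // ... emit ops ...
 *     if (!TCGV_IS_UNUSED_I64(addend)) {
 *         tcg_temp_free_i64(addend);
 *     }
 */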
/* Helper does not read globals (either directly or through an exception). It
   implies TCG_CALL_NO_WRITE_GLOBALS. */
#define TCG_CALL_NO_READ_GLOBALS    0x0010
/* Helper does not write globals. */
#define TCG_CALL_NO_WRITE_GLOBALS   0x0020
/* Helper can be safely suppressed if the return value is not used. */
#define TCG_CALL_NO_SIDE_EFFECTS    0x0040

/* Convenience versions of the most-used call flags. */
#define TCG_CALL_NO_RWG         TCG_CALL_NO_READ_GLOBALS
#define TCG_CALL_NO_WG          TCG_CALL_NO_WRITE_GLOBALS
#define TCG_CALL_NO_SE          TCG_CALL_NO_SIDE_EFFECTS
#define TCG_CALL_NO_RWG_SE      (TCG_CALL_NO_RWG | TCG_CALL_NO_SE)
#define TCG_CALL_NO_WG_SE       (TCG_CALL_NO_WG | TCG_CALL_NO_SE)

/* Used to align parameters. */
#define TCG_CALL_DUMMY_TCGV     MAKE_TCGV_I32(-1)
#define TCG_CALL_DUMMY_ARG      ((TCGArg)(-1))
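
/* Illustrative sketch (editor's addition): a pure helper that only computes a
 * value from its operands can be registered with TCG_CALL_NO_RWG_SE, telling
 * the optimizer it neither reads nor writes globals and may be dropped when
 * its result is unused.  The helper name below is hypothetical; real helpers
 * are declared through the DEF_HELPER_* machinery.
 *
 *     // flags = TCG_CALL_NO_RWG_SE
 *     //       = TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS
 *     uint32_t helper_my_popcount(uint32_t arg);
 */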
/* Conditions. Note that these are laid out for easy manipulation by
   the functions below:
     bit 0 is used for inverting;
     bit 1 is signed,
     bit 2 is unsigned,
     bit 3 is used with bit 0 for swapping signed/unsigned. */
typedef enum {
    /* non-signed */
    TCG_COND_NEVER  = 0 | 0 | 0 | 0,
    TCG_COND_ALWAYS = 0 | 0 | 0 | 1,
    TCG_COND_EQ     = 8 | 0 | 0 | 0,
    TCG_COND_NE     = 8 | 0 | 0 | 1,
    /* signed */
    TCG_COND_LT     = 0 | 0 | 2 | 0,
    TCG_COND_GE     = 0 | 0 | 2 | 1,
    TCG_COND_LE     = 8 | 0 | 2 | 0,
    TCG_COND_GT     = 8 | 0 | 2 | 1,
    /* unsigned */
    TCG_COND_LTU    = 0 | 4 | 0 | 0,
    TCG_COND_GEU    = 0 | 4 | 0 | 1,
    TCG_COND_LEU    = 8 | 4 | 0 | 0,
    TCG_COND_GTU    = 8 | 4 | 0 | 1,
} TCGCond;
/* Invert the sense of the comparison. */
static inline TCGCond tcg_invert_cond(TCGCond c)
{
    return (TCGCond)(c ^ 1);
}
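
/* Example (editor's note): XOR-ing bit 0 flips a condition to its logical
 * negation, e.g. tcg_invert_cond(TCG_COND_EQ) == TCG_COND_NE and
 * tcg_invert_cond(TCG_COND_LT) == TCG_COND_GE.
 */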
/* Swap the operands in a comparison. */
static inline TCGCond tcg_swap_cond(TCGCond c)
{
    return c & 6 ? (TCGCond)(c ^ 9) : c;
}
/* Create an "unsigned" version of a "signed" comparison. */
static inline TCGCond tcg_unsigned_cond(TCGCond c)
{
    return c & 2 ? (TCGCond)(c ^ 6) : c;
}
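
/* Example (editor's note): signed conditions have bit 1 set, so XOR-ing with 6
 * moves them onto the unsigned encodings, e.g.
 * tcg_unsigned_cond(TCG_COND_LT) == TCG_COND_LTU and
 * tcg_unsigned_cond(TCG_COND_GT) == TCG_COND_GTU; EQ/NE pass through unchanged.
 */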
/* Must a comparison be considered unsigned? */
static inline bool is_unsigned_cond(TCGCond c)
{
    return (c & 4) != 0;
}

/* Create a "high" version of a double-word comparison.
   This removes equality from a LTE or GTE comparison. */
static inline TCGCond tcg_high_cond(TCGCond c)
{
    switch (c) {
    case TCG_COND_GE:
    case TCG_COND_LE:
    case TCG_COND_GEU:
    case TCG_COND_LEU:
        return (TCGCond)(c ^ 8);
    default:
        return c;
    }
}
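
/* Example (editor's note): for a double-word compare split into a "high" and a
 * "low" part, tcg_high_cond(TCG_COND_LE) == TCG_COND_LT and
 * tcg_high_cond(TCG_COND_GEU) == TCG_COND_GTU, i.e. equality is left to the
 * low half; strict conditions such as LT or GTU are returned unchanged.
 */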
#define TEMP_VAL_DEAD  0
#define TEMP_VAL_REG   1
#define TEMP_VAL_MEM   2
#define TEMP_VAL_CONST 3
/* XXX: optimize memory layout */
typedef struct TCGTemp {
    TCGType base_type;
    TCGType type;
    int val_type;
    int reg;
    tcg_target_long val;
    int mem_reg;
    intptr_t mem_offset;
    unsigned int fixed_reg:1;
    unsigned int mem_coherent:1;
    unsigned int mem_allocated:1;
    unsigned int temp_local:1; /* If true, the temp is saved across
                                  basic blocks. Otherwise, it is not
                                  preserved across basic blocks. */
    unsigned int temp_allocated:1; /* never used for code gen */
    const char *name;
} TCGTemp;
typedef struct TCGContext TCGContext;

typedef struct TCGTempSet {
    unsigned long l[BITS_TO_LONGS(TCG_MAX_TEMPS)];
} TCGTempSet;
struct TCGContext {
    uint8_t *pool_cur, *pool_end;
    TCGPool *pool_first, *pool_current, *pool_first_large;

    /* goto_tb support */
    uint16_t *tb_next_offset;
    uint16_t *tb_jmp_offset; /* != NULL if USE_DIRECT_JUMP */

    /* liveness analysis */
    uint16_t *op_dead_args; /* for each operation, each bit tells if the
                               corresponding argument is dead */
    uint8_t *op_sync_args;  /* for each operation, each bit tells if the
                               corresponding output argument needs to be
                               synced to memory */

    /* tells in which temporary a given register is. It does not take
       into account fixed registers */
    int reg_to_temp[TCG_TARGET_NB_REGS];
    TCGRegSet reserved_regs;
    intptr_t current_frame_offset;
    intptr_t frame_start;

    TCGTemp temps[TCG_MAX_TEMPS]; /* globals first, temps after */
    TCGTempSet free_temps[TCG_TYPE_COUNT * 2];

#ifdef CONFIG_PROFILER
    int64_t op_count; /* total insn count */
    int op_count_max; /* max insn per TB */
    int64_t del_op_count;
    int64_t code_out_len;
    int64_t restore_count;
    int64_t restore_time;
#endif

#ifdef CONFIG_DEBUG_TCG
    int goto_tb_issue_mask;
#endif

    uint16_t gen_opc_buf[OPC_BUF_SIZE];
    TCGArg gen_opparam_buf[OPPARAM_BUF_SIZE];

    uint16_t *gen_opc_ptr;
    TCGArg *gen_opparam_ptr;
    target_ulong gen_opc_pc[OPC_BUF_SIZE];
    uint16_t gen_opc_icount[OPC_BUF_SIZE];
    uint8_t gen_opc_instr_start[OPC_BUF_SIZE];

    /* Code generation */
    int code_gen_max_blocks;
    uint8_t *code_gen_prologue;
    uint8_t *code_gen_buffer;
    size_t code_gen_buffer_size;
    /* threshold to flush the translated code buffer */
    size_t code_gen_buffer_max_size;
    uint8_t *code_gen_ptr;

    /* The TCGBackendData structure is private to tcg-target.c. */
    struct TCGBackendData *be;
};
extern TCGContext tcg_ctx;

/* pool based memory allocation */

void *tcg_malloc_internal(TCGContext *s, int size);
void tcg_pool_reset(TCGContext *s);
void tcg_pool_delete(TCGContext *s);
static inline void *tcg_malloc(int size)
{
    TCGContext *s = &tcg_ctx;
    uint8_t *ptr, *ptr_end;

    /* Round the request up to sizeof(long) alignment. */
    size = (size + sizeof(long) - 1) & ~(sizeof(long) - 1);
    ptr = s->pool_cur;
    ptr_end = ptr + size;
    if (unlikely(ptr_end > s->pool_end)) {
        return tcg_malloc_internal(&tcg_ctx, size);
    } else {
        s->pool_cur = ptr_end;
        return ptr;
    }
}
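
/* Worked example (editor's note): with 8-byte longs, a request for 13 bytes is
 * rounded up to 16:
 *     (13 + 8 - 1) & ~(8 - 1) == 20 & ~7 == 16
 * so successive pool allocations stay sizeof(long)-aligned.  Pool memory is
 * not freed per allocation; it is reclaimed in bulk by tcg_pool_reset().
 */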
void tcg_context_init(TCGContext *s);
void tcg_prologue_init(TCGContext *s);
void tcg_func_start(TCGContext *s);

int tcg_gen_code(TCGContext *s, uint8_t *gen_code_buf);
int tcg_gen_code_search_pc(TCGContext *s, uint8_t *gen_code_buf, long offset);

void tcg_set_frame(TCGContext *s, int reg, intptr_t start, intptr_t size);
TCGv_i32 tcg_global_reg_new_i32(int reg, const char *name);
TCGv_i32 tcg_global_mem_new_i32(int reg, intptr_t offset, const char *name);
TCGv_i32 tcg_temp_new_internal_i32(int temp_local);
static inline TCGv_i32 tcg_temp_new_i32(void)
{
    return tcg_temp_new_internal_i32(0);
}
static inline TCGv_i32 tcg_temp_local_new_i32(void)
{
    return tcg_temp_new_internal_i32(1);
}
void tcg_temp_free_i32(TCGv_i32 arg);
char *tcg_get_arg_str_i32(TCGContext *s, char *buf, int buf_size, TCGv_i32 arg);
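
/* Illustrative sketch (editor's addition): the usual lifetime of an i32
 * temporary in a front end.  Plain temporaries from tcg_temp_new_i32() are not
 * preserved across basic-block boundaries; use tcg_temp_local_new_i32() when
 * the value must survive a branch or label.
 *
 *     TCGv_i32 t = tcg_temp_new_i32();
 *     // ... emit ops that write and read t within the same basic block ...
 *     tcg_temp_free_i32(t);
 */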
TCGv_i64 tcg_global_reg_new_i64(int reg, const char *name);
TCGv_i64 tcg_global_mem_new_i64(int reg, intptr_t offset, const char *name);
TCGv_i64 tcg_temp_new_internal_i64(int temp_local);
static inline TCGv_i64 tcg_temp_new_i64(void)
{
    return tcg_temp_new_internal_i64(0);
}
static inline TCGv_i64 tcg_temp_local_new_i64(void)
{
    return tcg_temp_new_internal_i64(1);
}
void tcg_temp_free_i64(TCGv_i64 arg);
char *tcg_get_arg_str_i64(TCGContext *s, char *buf, int buf_size, TCGv_i64 arg);
#if defined(CONFIG_DEBUG_TCG)
/* If you call tcg_clear_temp_count() at the start of a section of
 * code which is not supposed to leak any TCG temporaries, then
 * calling tcg_check_temp_count() at the end of the section will
 * return 1 if the section did in fact leak a temporary.
 */
void tcg_clear_temp_count(void);
int tcg_check_temp_count(void);
#else
#define tcg_clear_temp_count() do { } while (0)
#define tcg_check_temp_count() 0
#endif
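
/* Illustrative sketch (editor's addition): bracketing a decoder with the
 * leak-check hooks above.  The decode function name is hypothetical.
 *
 *     tcg_clear_temp_count();
 *     disas_one_insn(env, dc);   // should free every temporary it creates
 *     if (tcg_check_temp_count()) {
 *         fprintf(stderr, "TCG temporary leak detected\n");
 *     }
 * In non-debug builds both calls compile away to nothing, so the check is
 * free to leave in place.
 */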
void tcg_dump_info(FILE *f, fprintf_function cpu_fprintf);

#define TCG_CT_ALIAS  0x80
#define TCG_CT_IALIAS 0x40
#define TCG_CT_REG    0x01
#define TCG_CT_CONST  0x02 /* any constant of register size */
typedef struct TCGArgConstraint {
    uint16_t ct;
    uint8_t alias_index;
    union {
        TCGRegSet regs;
    } u;
} TCGArgConstraint;

#define TCG_MAX_OP_ARGS 16
/* Bits for TCGOpDef->flags, 8 bits available. */
enum {
    /* Instruction defines the end of a basic block. */
    TCG_OPF_BB_END       = 0x01,
    /* Instruction clobbers call registers and potentially updates globals. */
    TCG_OPF_CALL_CLOBBER = 0x02,
    /* Instruction has side effects: it cannot be removed if its outputs
       are not used, and might trigger exceptions. */
    TCG_OPF_SIDE_EFFECTS = 0x04,
    /* Instruction operands are 64-bit (otherwise 32-bit). */
    TCG_OPF_64BIT        = 0x08,
    /* Instruction is optional and not implemented by the host, or insn
       is generic and should not be implemented by the host. */
    TCG_OPF_NOT_PRESENT  = 0x10,
};
typedef struct TCGOpDef {
    const char *name;
    uint8_t nb_oargs, nb_iargs, nb_cargs, nb_args;
    uint8_t flags;
    TCGArgConstraint *args_ct;
    int *sorted_args;
#if defined(CONFIG_DEBUG_TCG)
    int used;
#endif
} TCGOpDef;

extern TCGOpDef tcg_op_defs[];
extern const size_t tcg_op_defs_max;
typedef struct TCGTargetOpDef {
    TCGOpcode op;
    const char *args_ct_str[TCG_MAX_OP_ARGS];
} TCGTargetOpDef;

#define tcg_abort() \
do {\
    fprintf(stderr, "%s:%d: tcg fatal error\n", __FILE__, __LINE__);\
    abort();\
} while (0)
#ifdef CONFIG_DEBUG_TCG
# define tcg_debug_assert(X) do { assert(X); } while (0)
#elif QEMU_GNUC_PREREQ(4, 5)
# define tcg_debug_assert(X) \
    do { if (!(X)) { __builtin_unreachable(); } } while (0)
#else
# define tcg_debug_assert(X) do { (void)(X); } while (0)
#endif

void tcg_add_target_add_op_defs(const TCGTargetOpDef *tdefs);
#if UINTPTR_MAX == UINT32_MAX
#define TCGV_NAT_TO_PTR(n) MAKE_TCGV_PTR(GET_TCGV_I32(n))
#define TCGV_PTR_TO_NAT(n) MAKE_TCGV_I32(GET_TCGV_PTR(n))

#define tcg_const_ptr(V) TCGV_NAT_TO_PTR(tcg_const_i32((intptr_t)(V)))
#define tcg_global_reg_new_ptr(R, N) \
    TCGV_NAT_TO_PTR(tcg_global_reg_new_i32((R), (N)))
#define tcg_global_mem_new_ptr(R, O, N) \
    TCGV_NAT_TO_PTR(tcg_global_mem_new_i32((R), (O), (N)))
#define tcg_temp_new_ptr() TCGV_NAT_TO_PTR(tcg_temp_new_i32())
#define tcg_temp_free_ptr(T) tcg_temp_free_i32(TCGV_PTR_TO_NAT(T))
#else
#define TCGV_NAT_TO_PTR(n) MAKE_TCGV_PTR(GET_TCGV_I64(n))
#define TCGV_PTR_TO_NAT(n) MAKE_TCGV_I64(GET_TCGV_PTR(n))

#define tcg_const_ptr(V) TCGV_NAT_TO_PTR(tcg_const_i64((intptr_t)(V)))
#define tcg_global_reg_new_ptr(R, N) \
    TCGV_NAT_TO_PTR(tcg_global_reg_new_i64((R), (N)))
#define tcg_global_mem_new_ptr(R, O, N) \
    TCGV_NAT_TO_PTR(tcg_global_mem_new_i64((R), (O), (N)))
#define tcg_temp_new_ptr() TCGV_NAT_TO_PTR(tcg_temp_new_i64())
#define tcg_temp_free_ptr(T) tcg_temp_free_i64(TCGV_PTR_TO_NAT(T))
#endif /* UINTPTR_MAX == UINT32_MAX */
void tcg_gen_callN(TCGContext *s, TCGv_ptr func, unsigned int flags,
                   int sizemask, TCGArg ret, int nargs, TCGArg *args);

void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                        int c, int right, int arith);

TCGArg *tcg_optimize(TCGContext *s, uint16_t *tcg_opc_ptr, TCGArg *args,
                     TCGOpDef *tcg_op_def);

/* only used for debugging purposes */
void tcg_dump_ops(TCGContext *s);

void dump_ops(const uint16_t *opc_buf, const TCGArg *opparam_buf);
TCGv_i32 tcg_const_i32(int32_t val);
TCGv_i64 tcg_const_i64(int64_t val);
TCGv_i32 tcg_const_local_i32(int32_t val);
TCGv_i64 tcg_const_local_i64(int64_t val);
/**
 * tcg_qemu_tb_exec:
 * @env: CPUArchState * for the CPU
 * @tb_ptr: address of generated code for the TB to execute
 *
 * Start executing code from a given translation block.
 * Where translation blocks have been linked, execution
 * may proceed from the given TB into successive ones.
 * Control eventually returns only when some action is needed
 * from the top-level loop: either control must pass to a TB
 * which has not yet been directly linked, or an asynchronous
 * event such as an interrupt needs handling.
 *
 * The return value is a pointer to the next TB to execute
 * (if known; otherwise zero). This pointer is assumed to be
 * 4-aligned, and the bottom two bits are used to return further
 * information:
 *  0, 1: the link between this TB and the next is via the specified
 *        TB index (0 or 1). That is, we left the TB via (the equivalent
 *        of) "goto_tb <index>". The main loop uses this to determine
 *        how to link the TB just executed to the next.
 *  2:    we are using instruction counting code generation, and we
 *        did not start executing this TB because the instruction counter
 *        would hit zero midway through it. In this case the next-TB pointer
 *        returned is the TB we were about to execute, and the caller must
 *        arrange to execute the remaining count of instructions.
 *  3:    we stopped because the CPU's exit_request flag was set
 *        (usually meaning that there is an interrupt that needs to be
 *        handled). The next-TB pointer returned is the TB we were
 *        about to execute when we noticed the pending exit request.
 *
 * If the bottom two bits indicate an exit-via-index then the CPU
 * state is correctly synchronised and ready for execution of the next
 * TB (and in particular the guest PC is the address to execute next).
 * Otherwise, we gave up on execution of this TB before it started, and
 * the caller must fix up the CPU state by calling cpu_pc_from_tb()
 * with the next-TB pointer we return.
 *
 * Note that TCG targets may use a different definition of tcg_qemu_tb_exec
 * to this default (which just calls the prologue.code emitted by
 * tcg_target_qemu_prologue()).
 */
#define TB_EXIT_MASK 3
#define TB_EXIT_IDX0 0
#define TB_EXIT_IDX1 1
#define TB_EXIT_ICOUNT_EXPIRED 2
#define TB_EXIT_REQUESTED 3

#if !defined(tcg_qemu_tb_exec)
# define tcg_qemu_tb_exec(env, tb_ptr) \
    ((uintptr_t (*)(void *, void *))tcg_ctx.code_gen_prologue)(env, tb_ptr)
#endif
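
/* Illustrative sketch (editor's addition): how a main loop can decode the
 * value returned by tcg_qemu_tb_exec() using the TB_EXIT_* constants above.
 *
 *     uintptr_t next_tb = tcg_qemu_tb_exec(env, tb_ptr);
 *     TranslationBlock *tb = (TranslationBlock *)(next_tb & ~TB_EXIT_MASK);
 *     switch (next_tb & TB_EXIT_MASK) {
 *     case TB_EXIT_IDX0:
 *     case TB_EXIT_IDX1:
 *         // left via goto_tb; link slot 0/1 of 'tb' to the next TB if desired
 *         break;
 *     case TB_EXIT_ICOUNT_EXPIRED:
 *         // execute the remaining instruction-count budget by other means
 *         break;
 *     case TB_EXIT_REQUESTED:
 *         // exit_request was set; service the pending interrupt/event
 *         break;
 *     }
 */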
void tcg_register_jit(void *buf, size_t buf_size);

/*
 * Memory helpers that will be used by TCG generated code.
 */
#ifdef CONFIG_SOFTMMU
/* Value zero-extended to tcg register size. */
tcg_target_ulong helper_ret_ldub_mmu(CPUArchState *env, target_ulong addr,
                                     int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_le_lduw_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_le_ldul_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
uint64_t helper_le_ldq_mmu(CPUArchState *env, target_ulong addr,
                           int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_be_lduw_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_be_ldul_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
uint64_t helper_be_ldq_mmu(CPUArchState *env, target_ulong addr,
                           int mmu_idx, uintptr_t retaddr);

/* Value sign-extended to tcg register size. */
tcg_target_ulong helper_ret_ldsb_mmu(CPUArchState *env, target_ulong addr,
                                     int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_le_ldsw_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_le_ldsl_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_be_ldsw_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);
tcg_target_ulong helper_be_ldsl_mmu(CPUArchState *env, target_ulong addr,
                                    int mmu_idx, uintptr_t retaddr);

void helper_ret_stb_mmu(CPUArchState *env, target_ulong addr, uint8_t val,
                        int mmu_idx, uintptr_t retaddr);
void helper_le_stw_mmu(CPUArchState *env, target_ulong addr, uint16_t val,
                       int mmu_idx, uintptr_t retaddr);
void helper_le_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
                       int mmu_idx, uintptr_t retaddr);
void helper_le_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
                       int mmu_idx, uintptr_t retaddr);
void helper_be_stw_mmu(CPUArchState *env, target_ulong addr, uint16_t val,
                       int mmu_idx, uintptr_t retaddr);
void helper_be_stl_mmu(CPUArchState *env, target_ulong addr, uint32_t val,
                       int mmu_idx, uintptr_t retaddr);
void helper_be_stq_mmu(CPUArchState *env, target_ulong addr, uint64_t val,
                       int mmu_idx, uintptr_t retaddr);
/* Temporary aliases until backends are converted. */
#ifdef TARGET_WORDS_BIGENDIAN
# define helper_ret_ldsw_mmu  helper_be_ldsw_mmu
# define helper_ret_lduw_mmu  helper_be_lduw_mmu
# define helper_ret_ldsl_mmu  helper_be_ldsl_mmu
# define helper_ret_ldul_mmu  helper_be_ldul_mmu
# define helper_ret_ldq_mmu   helper_be_ldq_mmu
# define helper_ret_stw_mmu   helper_be_stw_mmu
# define helper_ret_stl_mmu   helper_be_stl_mmu
# define helper_ret_stq_mmu   helper_be_stq_mmu
#else
# define helper_ret_ldsw_mmu  helper_le_ldsw_mmu
# define helper_ret_lduw_mmu  helper_le_lduw_mmu
# define helper_ret_ldsl_mmu  helper_le_ldsl_mmu
# define helper_ret_ldul_mmu  helper_le_ldul_mmu
# define helper_ret_ldq_mmu   helper_le_ldq_mmu
# define helper_ret_stw_mmu   helper_le_stw_mmu
# define helper_ret_stl_mmu   helper_le_stl_mmu
# define helper_ret_stq_mmu   helper_le_stq_mmu
#endif

uint8_t helper_ldb_mmu(CPUArchState *env, target_ulong addr, int mmu_idx);
uint16_t helper_ldw_mmu(CPUArchState *env, target_ulong addr, int mmu_idx);
uint32_t helper_ldl_mmu(CPUArchState *env, target_ulong addr, int mmu_idx);
uint64_t helper_ldq_mmu(CPUArchState *env, target_ulong addr, int mmu_idx);

void helper_stb_mmu(CPUArchState *env, target_ulong addr,
                    uint8_t val, int mmu_idx);
void helper_stw_mmu(CPUArchState *env, target_ulong addr,
                    uint16_t val, int mmu_idx);
void helper_stl_mmu(CPUArchState *env, target_ulong addr,
                    uint32_t val, int mmu_idx);
void helper_stq_mmu(CPUArchState *env, target_ulong addr,
                    uint64_t val, int mmu_idx);
#endif /* CONFIG_SOFTMMU */