#ifndef TARGET_ARM_TRANSLATE_H
#define TARGET_ARM_TRANSLATE_H

typedef struct DisasContext {
    /* Nonzero if this instruction has been conditionally skipped. */
    int condjmp;
    /* The label that will be jumped to when the instruction is skipped. */
    int condlabel;
    /* Thumb-2 conditional execution bits. */
    int condexec_mask;
    int condexec_cond;
    struct TranslationBlock *tb;
    int singlestep_enabled;
#if !defined(CONFIG_USER_ONLY)
    int user;
#endif
    ARMMMUIdx mmu_idx; /* MMU index to use for normal loads/stores */
    bool ns;           /* Use non-secure CPREG bank on access */
    bool cpacr_fpen;   /* FP enabled via CPACR.FPEN */
    bool vfp_enabled;  /* FP enabled via FPSCR.EN */
    /* Immediate value in AArch32 SVC insn; must be set if is_jmp == DISAS_SWI
     * so that the top level loop can generate correct syndrome information.
     */
    uint32_t svc_imm;
    uint64_t features; /* CPU feature bits */
    /* Because unallocated encodings generate different exception syndrome
     * information from traps due to FP being disabled, we can't do a single
     * "is fp access disabled" check at a high level in the decode tree.
     * To help in catching bugs where the access check was forgotten in some
     * code path, we set this flag when the access check is done, and assert
     * that it is set at the point where we actually touch the FP regs.
     */
    bool fp_access_checked;
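    /* Usage sketch (illustrative only; the checking helper lives in the
     * decoder, e.g. fp_access_check() in translate-a64.c, name assumed here):
     *
     *     if (!fp_access_check(s)) {
     *         return;
     *     }
     *     ...
     *     assert(s->fp_access_checked);
     */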
    /* ARMv8 single-step state (this is distinct from the QEMU gdbstub
     * single-step support).
     */
    bool ss_active;
    bool pstate_ss;
    /* True if the insn just emitted was a load-exclusive instruction
     * (necessary for syndrome information for single step exceptions),
     * ie A64 LDX*, LDAX*, A32/T32 LDREX*, LDAEX*.
     */
    bool is_ldex;
    /* True if a single-step exception will be taken to the current EL */
    bool ss_same_el;
    /* Bottom two bits of XScale c15_cpar coprocessor access control reg */
    int c15_cpar;
#define TMP_A64_MAX 16
    int tmp_a64_count;
    TCGv_i64 tmp_a64[TMP_A64_MAX];
} DisasContext;

extern TCGv_ptr cpu_env;

static inline int arm_dc_feature(DisasContext *dc, int feature)
{
    return (dc->features & (1ULL << feature)) != 0;
}
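
/* Usage sketch (illustrative, not part of this header): decoder code gates
 * an encoding on a feature bit, e.g.
 *
 *     if (!arm_dc_feature(s, ARM_FEATURE_THUMB2)) {
 *         goto illegal_op;
 *     }
 */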

static inline int get_mem_index(DisasContext *s)
{
    return s->mmu_idx;
}

/* target-specific extra values for is_jmp */
/* These instructions trap after executing, so the A32/T32 decoder must
 * defer them until after the conditional execution state has been updated.
 * WFI also needs special handling when single-stepping.
 */
#define DISAS_WFI 4
#define DISAS_SWI 5
/* For instructions which unconditionally cause an exception we can skip
 * emitting unreachable code at the end of the TB in the A64 decoder.
 */
#define DISAS_EXC 6

#ifdef TARGET_AARCH64
void a64_translate_init(void);
void gen_intermediate_code_internal_a64(ARMCPU *cpu,
                                        TranslationBlock *tb,
                                        bool search_pc);
void gen_a64_set_pc_im(uint64_t val);
void aarch64_cpu_dump_state(CPUState *cs, FILE *f,
                            fprintf_function cpu_fprintf, int flags);
#else
static inline void a64_translate_init(void)
{
}

static inline void gen_intermediate_code_internal_a64(ARMCPU *cpu,
                                                      TranslationBlock *tb,
                                                      bool search_pc)
{
}

static inline void gen_a64_set_pc_im(uint64_t val)
{
}

static inline void aarch64_cpu_dump_state(CPUState *cs, FILE *f,
                                          fprintf_function cpu_fprintf,
                                          int flags)
{
}
#endif

void arm_gen_test_cc(int cc, int label);

#endif /* TARGET_ARM_TRANSLATE_H */