/*---------------------------------------------------------------*/
/*--- begin                                 guest_arm64_defs.h ---*/
/*---------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2013-2017 OpenWorks
      info@open-works.net

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, see <http://www.gnu.org/licenses/>.

   The GNU General Public License is contained in the file COPYING.
*/

#ifndef __VEX_GUEST_ARM64_DEFS_H
#define __VEX_GUEST_ARM64_DEFS_H

#include "libvex_basictypes.h"
#include "guest_generic_bb_to_IR.h"     // DisResult

/*---------------------------------------------------------*/
/*--- arm64 to IR conversion                            ---*/
/*---------------------------------------------------------*/

/* Convert one ARM64 insn to IR.  See the type DisOneInstrFn in
   guest_generic_bb_to_IR.h. */
extern
DisResult disInstr_ARM64 ( IRSB*              irbb,
                           const UChar*       guest_code,
                           Long               delta,
                           Addr               guest_IP,
                           VexArch            guest_arch,
                           const VexArchInfo* archinfo,
                           const VexAbiInfo*  abiinfo,
                           VexEndness         host_endness,
                           Bool               sigill_diag );

/* Used by the optimiser to specialise calls to helpers. */
extern
IRExpr* guest_arm64_spechelper ( const HChar* function_name,
                                 IRExpr** args,
                                 IRStmt** precedingStmts,
                                 Int      n_precedingStmts );
/* Describes to the optimiser which parts of the guest state require
   precise memory exceptions.  This is logically part of the guest
   state description. */
extern
Bool guest_arm64_state_requires_precise_mem_exns ( Int, Int,
                                                   VexRegisterUpdates );
extern
VexGuestLayout arm64Guest_layout;


/*---------------------------------------------------------*/
/*--- arm64 guest helpers                               ---*/
/*---------------------------------------------------------*/

/* --- CLEAN HELPERS --- */

/* Calculate NZCV from the supplied thunk components, in the positions
   they appear in the CPSR, viz bits 31:28 for N Z C V respectively.
   Returned bits 63:32 and 27:0 are zero. */
extern
ULong arm64g_calculate_flags_nzcv ( ULong cc_op, ULong cc_dep1,
                                    ULong cc_dep2, ULong cc_dep3 );

/* Calculate the C flag from the thunk components, in the lowest bit
   of the word (bit 0). */
extern
ULong arm64g_calculate_flag_c ( ULong cc_op, ULong cc_dep1,
                                ULong cc_dep2, ULong cc_dep3 );

//ZZ /* Calculate the V flag from the thunk components, in the lowest bit
//ZZ    of the word (bit 0). */
//ZZ extern
//ZZ UInt armg_calculate_flag_v ( UInt cc_op, UInt cc_dep1,
//ZZ                              UInt cc_dep2, UInt cc_dep3 );
//ZZ
/* Calculate the specified condition from the thunk components, in the
   lowest bit of the word (bit 0). */
extern
ULong arm64g_calculate_condition ( /* ARM64Condcode << 4 | cc_op */
                                   ULong cond_n_op ,
                                   ULong cc_dep1,
                                   ULong cc_dep2, ULong cc_dep3 );

//ZZ /* Calculate the QC flag from the thunk components, in the lowest bit
//ZZ    of the word (bit 0). */
//ZZ extern
//ZZ UInt armg_calculate_flag_qc ( UInt resL1, UInt resL2,
//ZZ                               UInt resR1, UInt resR2 );

extern ULong arm64g_calc_crc32b  ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32h  ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32w  ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32x  ( ULong acc, ULong bits );

extern ULong arm64g_calc_crc32cb ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32ch ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32cw ( ULong acc, ULong bits );
extern ULong arm64g_calc_crc32cx ( ULong acc, ULong bits );
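
/* Editorial note, not part of the original header: these helpers mirror
   the A64 CRC32{B,H,W,X} and CRC32C{B,H,W,X} instructions.  Assuming they
   follow the instruction semantics, 'acc' carries the current 32-bit CRC
   value and 'bits' supplies the data, of which the low 8/16/32/64 bits
   are used by the b/h/w/x variants respectively, e.g.

      ULong newcrc = arm64g_calc_crc32b(oldcrc, byte);  // low 8 bits of 'byte'

   The 'c' variants use the CRC-32C (Castagnoli) polynomial rather than
   the CRC-32 one. */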
/* --- DIRTY HELPERS --- */

extern ULong arm64g_dirtyhelper_MRS_CNTVCT_EL0 ( void );

extern ULong arm64g_dirtyhelper_MRS_CNTFRQ_EL0 ( void );

extern void arm64g_dirtyhelper_PMULLQ ( /*OUT*/V128* res,
                                        ULong arg1, ULong arg2 );

extern void arm64g_dirtyhelper_AESE ( /*OUT*/V128* res,
                                      ULong argHi, ULong argLo );
extern void arm64g_dirtyhelper_AESD ( /*OUT*/V128* res,
                                      ULong argHi, ULong argLo );
extern void arm64g_dirtyhelper_AESMC ( /*OUT*/V128* res,
                                       ULong argHi, ULong argLo );
extern void arm64g_dirtyhelper_AESIMC ( /*OUT*/V128* res,
                                        ULong argHi, ULong argLo );

extern
void arm64g_dirtyhelper_SHA1C ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA1H ( /*OUT*/V128* res,
                                ULong nHi, ULong nLo );
extern
void arm64g_dirtyhelper_SHA1M ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA1P ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA1SU0 ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                  ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA1SU1 ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                  ULong nHi, ULong nLo );
extern
void arm64g_dirtyhelper_SHA256H2 ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                   ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA256H ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                  ULong nHi, ULong nLo, ULong mHi, ULong mLo );
extern
void arm64g_dirtyhelper_SHA256SU0 ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                    ULong nHi, ULong nLo );
extern
void arm64g_dirtyhelper_SHA256SU1 ( /*OUT*/V128* res, ULong dHi, ULong dLo,
                                    ULong nHi, ULong nLo,
                                    ULong mHi, ULong mLo );


/*---------------------------------------------------------*/
/*--- Condition code stuff                              ---*/
/*---------------------------------------------------------*/

/* Flag masks.  Defines positions of flag bits in the NZCV
   register. */
#define ARM64G_CC_SHIFT_N  31
#define ARM64G_CC_SHIFT_Z  30
#define ARM64G_CC_SHIFT_C  29
#define ARM64G_CC_SHIFT_V  28
//ZZ #define ARMG_CC_SHIFT_Q  27
//ZZ
//ZZ #define ARMG_CC_MASK_N  (1 << ARMG_CC_SHIFT_N)
//ZZ #define ARMG_CC_MASK_Z  (1 << ARMG_CC_SHIFT_Z)
//ZZ #define ARMG_CC_MASK_C  (1 << ARMG_CC_SHIFT_C)
//ZZ #define ARMG_CC_MASK_V  (1 << ARMG_CC_SHIFT_V)
//ZZ #define ARMG_CC_MASK_Q  (1 << ARMG_CC_SHIFT_Q)
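
/* Illustrative sketch, not part of the original header: given the shift
   positions above and the return format of arm64g_calculate_flags_nzcv
   (NZCV in bits 31:28, all other bits zero), a caller could recover the
   individual flags like so:

      ULong nzcv = arm64g_calculate_flags_nzcv(cc_op, cc_dep1,
                                               cc_dep2, cc_dep3);
      UInt n = (nzcv >> ARM64G_CC_SHIFT_N) & 1;
      UInt z = (nzcv >> ARM64G_CC_SHIFT_Z) & 1;
      UInt c = (nzcv >> ARM64G_CC_SHIFT_C) & 1;
      UInt v = (nzcv >> ARM64G_CC_SHIFT_V) & 1;
*/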
/* Flag thunk descriptors.  A four-word thunk is used to record
   details of the most recent flag-setting operation, so NZCV can
   be computed later if needed.

   The four words are:

      CC_OP, which describes the operation.

      CC_DEP1, CC_DEP2, CC_NDEP.  These are arguments to the
      operation.  We want to set up the mcx_masks in flag helper calls
      involving these fields so that Memcheck "believes" that the
      resulting flags are data-dependent on both CC_DEP1 and
      CC_DEP2.  Hence the name DEP.

   When building the thunk, it is always necessary to write words into
   CC_DEP1/2 and NDEP, even if those args are not used given the CC_OP
   field.  This is important because otherwise Memcheck could give
   false positives as it does not understand the relationship between
   the CC_OP field and CC_DEP1/2/NDEP, and so believes that the
   definedness of the stored flags always depends on all 3 DEP values.

   A summary of the field usages is:

   OP            DEP1              DEP2              DEP3
   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

   OP_COPY       curr_NZCV:28x0    unused            unused
   OP_ADD32      argL              argR              unused
   OP_ADD64      argL              argR              unused
   OP_SUB32      argL              argR              unused
   OP_SUB64      argL              argR              unused
   OP_ADC32      argL              argR              63x0:old_C
   OP_ADC64      argL              argR              63x0:old_C
   OP_SBC32      argL              argR              63x0:old_C
   OP_SBC64      argL              argR              63x0:old_C
   OP_LOGIC32    result            unused            unused
   OP_LOGIC64    result            unused            unused
//ZZ    OP_MUL    result            unused            30x0:old_C:old_V
//ZZ    OP_MULL   resLO32           resHI32           30x0:old_C:old_V
//ZZ */
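
/* Illustrative sketch, not part of the original header: per the table
   above, a flag-setting 64-bit ADD would be recorded by writing

      CC_OP   = ARM64G_CC_OP_ADD64;   // see the enum below
      CC_DEP1 = argL;
      CC_DEP2 = argR;
      CC_DEP3 = 0;                    // unused, but must still be written

   into the thunk, after which NZCV can be recomputed on demand with
   arm64g_calculate_flags_nzcv(CC_OP, CC_DEP1, CC_DEP2, CC_DEP3). */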
enum {
   ARM64G_CC_OP_COPY=0,   /* DEP1 = NZCV in 31:28, DEP2 = 0, DEP3 = 0
                             just copy DEP1 to output */

   ARM64G_CC_OP_ADD32,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_ADD64,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_SUB32,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_SUB64,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_ADC32,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_ADC64,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_SBC32,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_SBC64,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_LOGIC32,  /* DEP1 = result, DEP2 = 0, DEP3 = 0 */
   ARM64G_CC_OP_LOGIC64,  /* DEP1 = result, DEP2 = 0, DEP3 = 0 */

//ZZ    ARMG_CC_OP_MUL,    /* DEP1 = result, DEP2 = 0, DEP3 = oldC:old_V
//ZZ                          (in bits 1:0) */
//ZZ
//ZZ    ARMG_CC_OP_MULL,   /* DEP1 = resLO32, DEP2 = resHI32, DEP3 = oldC:old_V
//ZZ                          (in bits 1:0) */

   ARM64G_CC_OP_NUMBER
};

/* XXXX because of the calling conventions for
   arm64g_calculate_condition, all these OP values MUST be in the range
   0 .. 15 only (viz, 4-bits). */
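
/* Illustrative sketch, not part of the original header: given the 4-bit
   constraint above, the selector passed to arm64g_calculate_condition
   packs a condition code (see ARM64Condcode below) in bits 7:4 and the
   CC_OP in bits 3:0.  For example, to evaluate "EQ after a flag-setting
   64-bit SUB":

      ULong cond_n_op = (ARM64CondEQ << 4) | ARM64G_CC_OP_SUB64;
      ULong holds     = arm64g_calculate_condition(cond_n_op, cc_dep1,
                                                   cc_dep2, cc_dep3);
      // 'holds' is 1 or 0 in bit 0.
*/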

/* Defines conditions which we can ask for */

typedef
   enum {
      ARM64CondEQ = 0,  /* equal                         : Z=1 */
      ARM64CondNE = 1,  /* not equal                     : Z=0 */

      ARM64CondCS = 2,  /* >=u (higher or same) (aka HS) : C=1 */
      ARM64CondCC = 3,  /* <u  (lower)          (aka LO) : C=0 */

      ARM64CondMI = 4,  /* minus (negative)              : N=1 */
      ARM64CondPL = 5,  /* plus (zero or +ve)            : N=0 */

      ARM64CondVS = 6,  /* overflow                      : V=1 */
      ARM64CondVC = 7,  /* no overflow                   : V=0 */

      ARM64CondHI = 8,  /* >u   (higher)                 : C=1 && Z=0 */
      ARM64CondLS = 9,  /* <=u  (lower or same)          : C=0 || Z=1 */

      ARM64CondGE = 10, /* >=s (signed greater or equal) : N=V */
      ARM64CondLT = 11, /* <s  (signed less than)        : N!=V */

      ARM64CondGT = 12, /* >s  (signed greater)          : Z=0 && N=V */
      ARM64CondLE = 13, /* <=s (signed less or equal)    : Z=1 || N!=V */

      ARM64CondAL = 14, /* always (unconditional)        : 1 */
      ARM64CondNV = 15  /* always (unconditional)        : 1 */
   }
   ARM64Condcode;


/* Vector element size specifiers */

typedef
   enum {
      ARM64VSizeH = 0,  /* 16 bits (integer halfword or half-precision FP) */
      ARM64VSizeS = 1,  /* 32 bits (integer shortword or single-precision FP) */
      ARM64VSizeD = 2   /* 64 bits (integer word or double-precision FP) */
   }
   ARM64VecESize;


#endif /* ndef __VEX_GUEST_ARM64_DEFS_H */

/*---------------------------------------------------------------*/
/*--- end                                   guest_arm64_defs.h ---*/
/*---------------------------------------------------------------*/