/*---------------------------------------------------------------*/
/*--- begin                                 host_arm64_defs.h ---*/
/*---------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2013-2017 OpenWorks
      info@open-works.net

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, see <http://www.gnu.org/licenses/>.

   The GNU General Public License is contained in the file COPYING.
*/

#ifndef __VEX_HOST_ARM64_DEFS_H
#define __VEX_HOST_ARM64_DEFS_H

#include "libvex_basictypes.h"
#include "libvex.h"              // VexArch
#include "host_generic_regs.h"   // HReg


/* --------- Registers. --------- */
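
/* In the mkHReg calls below, the arguments appear to be: an
   is-virtual flag (False here, since these are all real registers),
   the register class, the hardware encoding, and the register's index
   in the real-register universe.  So, for example, hregARM64_X22()
   denotes real 64-bit integer register x22 (encoding 22), occupying
   universe slot 0. */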

#define ST_IN static inline
ST_IN HReg hregARM64_X22 ( void ) { return mkHReg(False, HRcInt64, 22, 0); }
ST_IN HReg hregARM64_X23 ( void ) { return mkHReg(False, HRcInt64, 23, 1); }
ST_IN HReg hregARM64_X24 ( void ) { return mkHReg(False, HRcInt64, 24, 2); }
ST_IN HReg hregARM64_X25 ( void ) { return mkHReg(False, HRcInt64, 25, 3); }
ST_IN HReg hregARM64_X26 ( void ) { return mkHReg(False, HRcInt64, 26, 4); }
ST_IN HReg hregARM64_X27 ( void ) { return mkHReg(False, HRcInt64, 27, 5); }
ST_IN HReg hregARM64_X28 ( void ) { return mkHReg(False, HRcInt64, 28, 6); }

ST_IN HReg hregARM64_X0  ( void ) { return mkHReg(False, HRcInt64,  0, 7); }
ST_IN HReg hregARM64_X1  ( void ) { return mkHReg(False, HRcInt64,  1, 8); }
ST_IN HReg hregARM64_X2  ( void ) { return mkHReg(False, HRcInt64,  2, 9); }
ST_IN HReg hregARM64_X3  ( void ) { return mkHReg(False, HRcInt64,  3, 10); }
ST_IN HReg hregARM64_X4  ( void ) { return mkHReg(False, HRcInt64,  4, 11); }
ST_IN HReg hregARM64_X5  ( void ) { return mkHReg(False, HRcInt64,  5, 12); }
ST_IN HReg hregARM64_X6  ( void ) { return mkHReg(False, HRcInt64,  6, 13); }
ST_IN HReg hregARM64_X7  ( void ) { return mkHReg(False, HRcInt64,  7, 14); }

ST_IN HReg hregARM64_Q16 ( void ) { return mkHReg(False, HRcVec128, 16, 15); }
ST_IN HReg hregARM64_Q17 ( void ) { return mkHReg(False, HRcVec128, 17, 16); }
ST_IN HReg hregARM64_Q18 ( void ) { return mkHReg(False, HRcVec128, 18, 17); }
ST_IN HReg hregARM64_Q19 ( void ) { return mkHReg(False, HRcVec128, 19, 18); }
ST_IN HReg hregARM64_Q20 ( void ) { return mkHReg(False, HRcVec128, 20, 19); }

ST_IN HReg hregARM64_D8  ( void ) { return mkHReg(False, HRcFlt64,  8, 20); }
ST_IN HReg hregARM64_D9  ( void ) { return mkHReg(False, HRcFlt64,  9, 21); }
ST_IN HReg hregARM64_D10 ( void ) { return mkHReg(False, HRcFlt64, 10, 22); }
ST_IN HReg hregARM64_D11 ( void ) { return mkHReg(False, HRcFlt64, 11, 23); }
ST_IN HReg hregARM64_D12 ( void ) { return mkHReg(False, HRcFlt64, 12, 24); }
ST_IN HReg hregARM64_D13 ( void ) { return mkHReg(False, HRcFlt64, 13, 25); }

ST_IN HReg hregARM64_X8  ( void ) { return mkHReg(False, HRcInt64,  8, 26); }
ST_IN HReg hregARM64_X9  ( void ) { return mkHReg(False, HRcInt64,  9, 27); }
ST_IN HReg hregARM64_X21 ( void ) { return mkHReg(False, HRcInt64, 21, 28); }

// This is the integer register with encoding 31.  Be *very* careful how you
// use it, since its meaning depends on the instruction, and indeed even on
// the position within an instruction, at which it appears.  It denotes
// either the zero register or the stack pointer.
ST_IN HReg hregARM64_XZR_XSP ( void ) { return mkHReg(False,
                                                      HRcInt64, 31, 29); }
#undef ST_IN

extern UInt ppHRegARM64 ( HReg );

/* Number of registers used for arg passing in function calls */
#define ARM64_N_ARGREGS 8   /* x0 .. x7 */


/* --------- Condition codes. --------- */

typedef
   enum {
      ARM64cc_EQ = 0,  /* equal                          : Z=1 */
      ARM64cc_NE = 1,  /* not equal                      : Z=0 */

      ARM64cc_CS = 2,  /* >=u (higher or same)           : C=1 */
      ARM64cc_CC = 3,  /* <u  (lower)                    : C=0 */

      ARM64cc_MI = 4,  /* minus (negative)               : N=1 */
      ARM64cc_PL = 5,  /* plus (zero or +ve)             : N=0 */

      ARM64cc_VS = 6,  /* overflow                       : V=1 */
      ARM64cc_VC = 7,  /* no overflow                    : V=0 */

      ARM64cc_HI = 8,  /* >u   (higher)                  : C=1 && Z=0 */
      ARM64cc_LS = 9,  /* <=u  (lower or same)           : !(C=1 && Z=0) */

      ARM64cc_GE = 10, /* >=s (signed greater or equal)  : N=V */
      ARM64cc_LT = 11, /* <s  (signed less than)         : !(N=V) */

      ARM64cc_GT = 12, /* >s  (signed greater)           : Z=0 && N=V */
      ARM64cc_LE = 13, /* <=s (signed less or equal)     : !(Z=0 && N=V) */

      ARM64cc_AL = 14, /* always (unconditional) */
      ARM64cc_NV = 15  /* in 64-bit mode also means "always" */
   }
   ARM64CondCode;


/* --------- Memory address expressions (amodes). --------- */

typedef
   enum {
      ARM64am_RI9=10, /* reg + simm9 */
      ARM64am_RI12,   /* reg + uimm12 * szB (iow, scaled by access size) */
      ARM64am_RR      /* reg1 + reg2 */
   }
   ARM64AModeTag;

typedef
   struct {
      ARM64AModeTag tag;
      union {
         struct {
            HReg reg;
            Int  simm9; /* -256 .. +255 */
         } RI9;
         struct {
            HReg  reg;
            UInt  uimm12; /* 0 .. 4095 */
            UChar szB;    /* 1, 2, 4, 8 (16 ?) */
         } RI12;
         struct {
            HReg base;
            HReg index;
         } RR;
      } ARM64am;
   }
   ARM64AMode;

extern ARM64AMode* ARM64AMode_RI9  ( HReg reg, Int simm9 );
extern ARM64AMode* ARM64AMode_RI12 ( HReg reg, Int uimm12, UChar szB );
extern ARM64AMode* ARM64AMode_RR   ( HReg base, HReg index );
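
/* A purely illustrative example: an amode for "x21 + 8", used with a
   64-bit load into x0.  ARM64Instr_LdSt64 is declared further down
   this file.

      ARM64AMode* am = ARM64AMode_RI9 ( hregARM64_X21(), 8 );
      ARM64Instr* ld = ARM64Instr_LdSt64 ( True/*isLoad*/,
                                           hregARM64_X0(), am );
*/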


/* --------- Reg or uimm12 or (uimm12 << 12) operands --------- */

typedef
   enum {
      ARM64riA_I12=20, /* uimm12 << 0 or 12 only */
      ARM64riA_R       /* reg */
   }
   ARM64RIATag;

typedef
   struct {
      ARM64RIATag tag;
      union {
         struct {
            UShort imm12; /* 0 .. 4095 */
            UChar  shift; /* 0 or 12 only */
         } I12;
         struct {
            HReg reg;
         } R;
      } ARM64riA;
   }
   ARM64RIA;

extern ARM64RIA* ARM64RIA_I12 ( UShort imm12, UChar shift );
extern ARM64RIA* ARM64RIA_R   ( HReg );
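
/* For example: ARM64RIA_I12(3, 0) denotes the immediate value 3, and
   ARM64RIA_I12(3, 12) denotes 3 << 12 = 12288, these being the only
   two shift positions that ADD/SUB immediates allow. */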


/* --------- Reg or "bitfield" (logic immediate) operands --------- */

typedef
   enum {
      ARM64riL_I13=6, /* weird-o bitfield immediate, 13 bits in total */
      ARM64riL_R      /* reg */
   }
   ARM64RILTag;

typedef
   struct {
      ARM64RILTag tag;
      union {
         struct {
            UChar bitN; /* 0 .. 1 */
            UChar immR; /* 0 .. 63 */
            UChar immS; /* 0 .. 63 */
         } I13;
         struct {
            HReg reg;
         } R;
      } ARM64riL;
   }
   ARM64RIL;

extern ARM64RIL* ARM64RIL_I13 ( UChar bitN, UChar immR, UChar immS );
extern ARM64RIL* ARM64RIL_R   ( HReg );
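
/* The (bitN, immR, immS) triple is the standard AArch64 "logical
   immediate" encoding, N:immr:imms.  Illustratively, and assuming the
   usual decoding rules, ARM64RIL_I13(1, 0, 0) denotes the 64-bit
   constant 0x1: N=1 selects a 64-bit element, immS=0 gives a run of
   one set bit, and immR=0 applies no rotation. */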


/* --------------- Reg or uimm6 operands --------------- */

typedef
   enum {
      ARM64ri6_I6=30, /* uimm6, 1 .. 63 only */
      ARM64ri6_R      /* reg */
   }
   ARM64RI6Tag;

typedef
   struct {
      ARM64RI6Tag tag;
      union {
         struct {
            UInt imm6; /* 1 .. 63 */
         } I6;
         struct {
            HReg reg;
         } R;
      } ARM64ri6;
   }
   ARM64RI6;

extern ARM64RI6* ARM64RI6_I6 ( UInt imm6 );
extern ARM64RI6* ARM64RI6_R  ( HReg );
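
/* Illustratively: for "x0 = x1 << 3" the shift-amount operand would
   be ARM64RI6_I6(3), whereas a variable shift "x0 = x1 << x2" would
   instead use ARM64RI6_R(hregARM64_X2()). */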


/* --------------------- Instructions --------------------- */

typedef
   enum {
      ARM64lo_AND=40,
      ARM64lo_OR,
      ARM64lo_XOR
   }
   ARM64LogicOp;

typedef
   enum {
      ARM64sh_SHL=50,
      ARM64sh_SHR,
      ARM64sh_SAR
   }
   ARM64ShiftOp;

typedef
   enum {
      ARM64rrs_ADD=54,
      ARM64rrs_SUB,
      ARM64rrs_AND,
      ARM64rrs_OR,
      ARM64rrs_XOR,
      ARM64rrs_INVALID
   }
   ARM64RRSOp;

typedef
   enum {
      ARM64un_NEG=60,
      ARM64un_NOT,
      ARM64un_CLZ
   }
   ARM64UnaryOp;

typedef
   enum {
      ARM64mul_PLAIN=70, /* lo64(64 * 64)  */
      ARM64mul_ZX,       /* hi64(64 *u 64) */
      ARM64mul_SX        /* hi64(64 *s 64) */
   }
   ARM64MulOp;

typedef
   /* These characterise an integer-FP conversion, but don't imply any
      particular direction. */
   enum {
      ARM64cvt_F32_I32S=80,
      ARM64cvt_F64_I32S,
      ARM64cvt_F32_I64S,
      ARM64cvt_F64_I64S,
      ARM64cvt_F32_I32U,
      ARM64cvt_F64_I32U,
      ARM64cvt_F32_I64U,
      ARM64cvt_F64_I64U,
      ARM64cvt_INVALID
   }
   ARM64CvtOp;

typedef
   enum {
      ARM64fpb_ADD=100,
      ARM64fpb_SUB,
      ARM64fpb_MUL,
      ARM64fpb_DIV,
      ARM64fpb_INVALID
   }
   ARM64FpBinOp;

typedef
   enum {
      ARM64fpt_FMADD=105,
      ARM64fpt_FMSUB,
      ARM64fpt_INVALID
   }
   ARM64FpTriOp;

typedef
   enum {
      ARM64fpu_NEG=110,
      ARM64fpu_ABS,
      ARM64fpu_SQRT,
      ARM64fpu_RINT,
      ARM64fpu_RECPX,
      ARM64fpu_INVALID
   }
   ARM64FpUnaryOp;

typedef
   enum {
      ARM64vecb_ADD64x2=120, ARM64vecb_ADD32x4,
      ARM64vecb_ADD16x8,     ARM64vecb_ADD8x16,
      ARM64vecb_SUB64x2,     ARM64vecb_SUB32x4,
      ARM64vecb_SUB16x8,     ARM64vecb_SUB8x16,
      ARM64vecb_MUL32x4,
      ARM64vecb_MUL16x8,     ARM64vecb_MUL8x16,
      ARM64vecb_FADD64x2,    ARM64vecb_FADD32x4,
      ARM64vecb_FADD16x8,
      ARM64vecb_FSUB64x2,    ARM64vecb_FSUB32x4,
      ARM64vecb_FSUB16x8,
      ARM64vecb_FMUL64x2,    ARM64vecb_FMUL32x4,
      ARM64vecb_FDIV64x2,    ARM64vecb_FDIV32x4,
      ARM64vecb_FMAX64x2,    ARM64vecb_FMAX32x4,
      ARM64vecb_FMIN64x2,    ARM64vecb_FMIN32x4,
      ARM64vecb_UMAX32x4,
      ARM64vecb_UMAX16x8,    ARM64vecb_UMAX8x16,
      ARM64vecb_UMIN32x4,
      ARM64vecb_UMIN16x8,    ARM64vecb_UMIN8x16,
      ARM64vecb_SMAX32x4,
      ARM64vecb_SMAX16x8,    ARM64vecb_SMAX8x16,
      ARM64vecb_SMIN32x4,
      ARM64vecb_SMIN16x8,    ARM64vecb_SMIN8x16,
      ARM64vecb_AND,
      ARM64vecb_ORR,
      ARM64vecb_XOR,
      ARM64vecb_CMEQ64x2,    ARM64vecb_CMEQ32x4,
      ARM64vecb_CMEQ16x8,    ARM64vecb_CMEQ8x16,
      ARM64vecb_CMHI64x2,    ARM64vecb_CMHI32x4, /* >u */
      ARM64vecb_CMHI16x8,    ARM64vecb_CMHI8x16,
      ARM64vecb_CMGT64x2,    ARM64vecb_CMGT32x4, /* >s */
      ARM64vecb_CMGT16x8,    ARM64vecb_CMGT8x16,
      ARM64vecb_FCMEQ64x2,   ARM64vecb_FCMEQ32x4,
      ARM64vecb_FCMGE64x2,   ARM64vecb_FCMGE32x4,
      ARM64vecb_FCMGT64x2,   ARM64vecb_FCMGT32x4,
      ARM64vecb_FCMGE16x8,   ARM64vecb_FCMGT16x8,
      ARM64vecb_FCMEQ16x8,
      ARM64vecb_TBL1,
      ARM64vecb_UZP164x2,    ARM64vecb_UZP132x4,
      ARM64vecb_UZP116x8,    ARM64vecb_UZP18x16,
      ARM64vecb_UZP264x2,    ARM64vecb_UZP232x4,
      ARM64vecb_UZP216x8,    ARM64vecb_UZP28x16,
      ARM64vecb_ZIP132x4,    ARM64vecb_ZIP116x8,
      ARM64vecb_ZIP18x16,    ARM64vecb_ZIP232x4,
      ARM64vecb_ZIP216x8,    ARM64vecb_ZIP28x16,
      ARM64vecb_PMUL8x16,
      ARM64vecb_PMULL8x8,
      ARM64vecb_UMULL2DSS,
      ARM64vecb_UMULL4SHH,   ARM64vecb_UMULL8HBB,
      ARM64vecb_SMULL2DSS,
      ARM64vecb_SMULL4SHH,   ARM64vecb_SMULL8HBB,
      ARM64vecb_SQADD64x2,   ARM64vecb_SQADD32x4,
      ARM64vecb_SQADD16x8,   ARM64vecb_SQADD8x16,
      ARM64vecb_UQADD64x2,   ARM64vecb_UQADD32x4,
      ARM64vecb_UQADD16x8,   ARM64vecb_UQADD8x16,
      ARM64vecb_SQSUB64x2,   ARM64vecb_SQSUB32x4,
      ARM64vecb_SQSUB16x8,   ARM64vecb_SQSUB8x16,
      ARM64vecb_UQSUB64x2,   ARM64vecb_UQSUB32x4,
      ARM64vecb_UQSUB16x8,   ARM64vecb_UQSUB8x16,
      ARM64vecb_SQDMULL2DSS,
      ARM64vecb_SQDMULL4SHH,
      ARM64vecb_SQDMULH32x4,
      ARM64vecb_SQDMULH16x8,
      ARM64vecb_SQRDMULH32x4,
      ARM64vecb_SQRDMULH16x8,
      ARM64vecb_SQSHL64x2,   ARM64vecb_SQSHL32x4,
      ARM64vecb_SQSHL16x8,   ARM64vecb_SQSHL8x16,
      ARM64vecb_UQSHL64x2,   ARM64vecb_UQSHL32x4,
      ARM64vecb_UQSHL16x8,   ARM64vecb_UQSHL8x16,
      ARM64vecb_SQRSHL64x2,  ARM64vecb_SQRSHL32x4,
      ARM64vecb_SQRSHL16x8,  ARM64vecb_SQRSHL8x16,
      ARM64vecb_UQRSHL64x2,  ARM64vecb_UQRSHL32x4,
      ARM64vecb_UQRSHL16x8,  ARM64vecb_UQRSHL8x16,
      ARM64vecb_SSHL64x2,    ARM64vecb_SSHL32x4,
      ARM64vecb_SSHL16x8,    ARM64vecb_SSHL8x16,
      ARM64vecb_USHL64x2,    ARM64vecb_USHL32x4,
      ARM64vecb_USHL16x8,    ARM64vecb_USHL8x16,
      ARM64vecb_SRSHL64x2,   ARM64vecb_SRSHL32x4,
      ARM64vecb_SRSHL16x8,   ARM64vecb_SRSHL8x16,
      ARM64vecb_URSHL64x2,   ARM64vecb_URSHL32x4,
      ARM64vecb_URSHL16x8,   ARM64vecb_URSHL8x16,
      ARM64vecb_FRECPS64x2,  ARM64vecb_FRECPS32x4,
      ARM64vecb_FRSQRTS64x2, ARM64vecb_FRSQRTS32x4,
      ARM64vecb_INVALID
   }
   ARM64VecBinOp;

typedef
   enum {
      ARM64vecmo_SUQADD64x2=300, ARM64vecmo_SUQADD32x4,
      ARM64vecmo_SUQADD16x8,     ARM64vecmo_SUQADD8x16,
      ARM64vecmo_USQADD64x2,     ARM64vecmo_USQADD32x4,
      ARM64vecmo_USQADD16x8,     ARM64vecmo_USQADD8x16,
      ARM64vecmo_INVALID
   }
   ARM64VecModifyOp;

typedef
   enum {
      ARM64vecu_FNEG64x2=350, ARM64vecu_FNEG32x4, ARM64vecu_FNEG16x8,
      ARM64vecu_FABS64x2,     ARM64vecu_FABS32x4, ARM64vecu_FABS16x8,
      ARM64vecu_NOT,
      ARM64vecu_ABS64x2,      ARM64vecu_ABS32x4,
      ARM64vecu_ABS16x8,      ARM64vecu_ABS8x16,
      ARM64vecu_CLS32x4,      ARM64vecu_CLS16x8,  ARM64vecu_CLS8x16,
      ARM64vecu_CLZ32x4,      ARM64vecu_CLZ16x8,  ARM64vecu_CLZ8x16,
      ARM64vecu_CNT8x16,
      ARM64vecu_RBIT,
      ARM64vecu_REV1616B,
      ARM64vecu_REV3216B,     ARM64vecu_REV328H,
      ARM64vecu_REV6416B,     ARM64vecu_REV648H,  ARM64vecu_REV644S,
      ARM64vecu_URECPE32x4,
      ARM64vecu_URSQRTE32x4,
      ARM64vecu_FRECPE64x2,   ARM64vecu_FRECPE32x4,
      ARM64vecu_FRSQRTE64x2,  ARM64vecu_FRSQRTE32x4,
      ARM64vecu_FSQRT64x2,    ARM64vecu_FSQRT32x4, ARM64vecu_FSQRT16x8,
      ARM64vecu_INVALID
   }
   ARM64VecUnaryOp;

typedef
   enum {
      ARM64vecshi_USHR64x2=400,  ARM64vecshi_USHR32x4,
      ARM64vecshi_USHR16x8,      ARM64vecshi_USHR8x16,
      ARM64vecshi_SSHR64x2,      ARM64vecshi_SSHR32x4,
      ARM64vecshi_SSHR16x8,      ARM64vecshi_SSHR8x16,
      ARM64vecshi_SHL64x2,       ARM64vecshi_SHL32x4,
      ARM64vecshi_SHL16x8,       ARM64vecshi_SHL8x16,
      /* These narrowing shifts zero out the top half of the destination
         register. */
      ARM64vecshi_SQSHRN2SD,   ARM64vecshi_SQSHRN4HS,   ARM64vecshi_SQSHRN8BH,
      ARM64vecshi_UQSHRN2SD,   ARM64vecshi_UQSHRN4HS,   ARM64vecshi_UQSHRN8BH,
      ARM64vecshi_SQSHRUN2SD,  ARM64vecshi_SQSHRUN4HS,  ARM64vecshi_SQSHRUN8BH,
      ARM64vecshi_SQRSHRN2SD,  ARM64vecshi_SQRSHRN4HS,  ARM64vecshi_SQRSHRN8BH,
      ARM64vecshi_UQRSHRN2SD,  ARM64vecshi_UQRSHRN4HS,  ARM64vecshi_UQRSHRN8BH,
      ARM64vecshi_SQRSHRUN2SD, ARM64vecshi_SQRSHRUN4HS, ARM64vecshi_SQRSHRUN8BH,
      /* Saturating left shifts, of various flavours. */
      ARM64vecshi_UQSHL64x2,     ARM64vecshi_UQSHL32x4,
      ARM64vecshi_UQSHL16x8,     ARM64vecshi_UQSHL8x16,
      ARM64vecshi_SQSHL64x2,     ARM64vecshi_SQSHL32x4,
      ARM64vecshi_SQSHL16x8,     ARM64vecshi_SQSHL8x16,
      ARM64vecshi_SQSHLU64x2,    ARM64vecshi_SQSHLU32x4,
      ARM64vecshi_SQSHLU16x8,    ARM64vecshi_SQSHLU8x16,
      ARM64vecshi_INVALID
   }
   ARM64VecShiftImmOp;

typedef
   enum {
      ARM64vecna_XTN=450,
      ARM64vecna_SQXTN,
      ARM64vecna_UQXTN,
      ARM64vecna_SQXTUN,
      ARM64vecna_INVALID
   }
   ARM64VecNarrowOp;

typedef
   enum {
      /* baseline */
      ARM64in_Arith=1220,
      ARM64in_Cmp,
      ARM64in_Logic,
      ARM64in_RRS,
      ARM64in_Test,
      ARM64in_Shift,
      ARM64in_Unary,
      ARM64in_Set64,
      ARM64in_MovI,        /* int reg-reg move */
      ARM64in_Imm64,
      ARM64in_LdSt64,
      ARM64in_LdSt32,      /* w/ ZX loads */
      ARM64in_LdSt16,      /* w/ ZX loads */
      ARM64in_LdSt8,       /* w/ ZX loads */
      ARM64in_XDirect,     /* direct transfer to GA */
      ARM64in_XIndir,      /* indirect transfer to GA */
      ARM64in_XAssisted,   /* assisted transfer to GA */
      ARM64in_CSel,
      ARM64in_Call,
      ARM64in_AddToSP,     /* move SP by small, signed constant */
      ARM64in_FromSP,      /* move SP to integer register */
      ARM64in_Mul,
      ARM64in_LdrEX,       /* load exclusive, single register */
      ARM64in_StrEX,       /* store exclusive, single register */
      ARM64in_LdrEXP,      /* load exclusive, register pair, 2x64-bit only */
      ARM64in_StrEXP,      /* store exclusive, register pair, 2x64-bit only */
      ARM64in_CAS,
      ARM64in_CASP,
      ARM64in_MFence,
      ARM64in_ClrEX,
      /* ARM64in_V*: scalar ops involving vector registers */
      ARM64in_VLdStH,      /* ld/st to/from low 16 bits of vec reg, imm offset */
      ARM64in_VLdStS,      /* ld/st to/from low 32 bits of vec reg, imm offset */
      ARM64in_VLdStD,      /* ld/st to/from low 64 bits of vec reg, imm offset */
      ARM64in_VLdStQ,      /* ld/st to/from all 128 bits of vec reg, no offset */
      ARM64in_VCvtI2F,
      ARM64in_VCvtF2I,
      ARM64in_VCvtSD,      /* scalar 32 bit FP <--> 64 bit FP */
      ARM64in_VCvtHS,      /* scalar 16 bit FP <--> 32 bit FP */
      ARM64in_VCvtHD,      /* scalar 16 bit FP <--> 64 bit FP */
      ARM64in_VUnaryD,
      ARM64in_VUnaryS,
      ARM64in_VUnaryH,
      ARM64in_VBinD,
      ARM64in_VBinS,
      ARM64in_VBinH,
      ARM64in_VTriD,
      ARM64in_VTriS,
      ARM64in_VCmpD,
      ARM64in_VCmpS,
      ARM64in_VCmpH,
      ARM64in_VFCSel,
      ARM64in_FPCR,
      ARM64in_FPSR,
      /* ARM64in_V*V: vector ops on vector registers */
      ARM64in_VBinV,
      ARM64in_VModifyV,
      ARM64in_VUnaryV,
      ARM64in_VNarrowV,
      ARM64in_VShiftImmV,
      ARM64in_VExtV,
      ARM64in_VImmQ,
      ARM64in_VDfromX,     /* Move an Xreg to a Dreg */
      ARM64in_VQfromX,     /* Move an Xreg to a Qreg lo64, and zero hi64 */
      ARM64in_VQfromXX,    /* Move 2 Xregs to a Qreg */
      ARM64in_VXfromQ,     /* Move half a Qreg to an Xreg */
      ARM64in_VXfromDorS,  /* Move Dreg or Sreg(ZX) to an Xreg */
      ARM64in_VMov,        /* vector reg-reg move, 16, 8 or 4 bytes */
      /* infrastructure */
      ARM64in_EvCheck,     /* Event check */
      ARM64in_ProfInc      /* 64-bit profile counter increment */
   }
   ARM64InstrTag;

/* Destinations are on the LEFT (first operand) */

typedef
   struct {
      ARM64InstrTag tag;
      union {
         /* --- INTEGER INSTRUCTIONS --- */
         /* 64 bit ADD/SUB reg, reg or uimm12<<{0,12} */
         struct {
            HReg dst;
            HReg argL;
            ARM64RIA* argR;
            Bool isAdd;
         } Arith;
         /* 64 or 32 bit CMP reg, reg or aimm (SUB and set flags) */
         struct {
            HReg argL;
            ARM64RIA* argR;
            Bool is64;
         } Cmp;
         /* 64 bit AND/OR/XOR reg, reg or bitfield-immediate */
         struct {
            HReg dst;
            HReg argL;
            ARM64RIL* argR;
            ARM64LogicOp op;
         } Logic;
         /* 64 bit AND/OR/XOR/ADD/SUB, reg, reg-with-imm-shift */
         struct {
            HReg dst;
            HReg argL;
            HReg argR;
            ARM64ShiftOp shiftOp;
            UChar amt; /* 1 to 63 only */
            ARM64RRSOp mainOp;
         } RRS;
         /* 64 bit TST reg, reg or bimm (AND and set flags) */
         struct {
            HReg argL;
            ARM64RIL* argR;
         } Test;
         /* 64 bit SHL/SHR/SAR, 2nd arg is reg or imm */
         struct {
            HReg dst;
            HReg argL;
            ARM64RI6* argR;
            ARM64ShiftOp op;
         } Shift;
         /* NOT/NEG/CLZ, 64 bit only */
         struct {
            HReg dst;
            HReg src;
            ARM64UnaryOp op;
         } Unary;
         /* CSET -- Convert a condition code to a 64-bit value (0 or 1). */
         struct {
            HReg dst;
            ARM64CondCode cond;
         } Set64;
         /* MOV dst, src -- reg-reg move for integer registers */
         struct {
            HReg dst;
            HReg src;
         } MovI;
         /* Pseudo-insn; make a 64-bit immediate */
         struct {
            HReg dst;
            ULong imm64;
         } Imm64;
         /* 64-bit load or store */
         struct {
            Bool isLoad;
            HReg rD;
            ARM64AMode* amode;
         } LdSt64;
         /* zx-32-to-64-bit load, or 32-bit store */
         struct {
            Bool isLoad;
            HReg rD;
            ARM64AMode* amode;
         } LdSt32;
         /* zx-16-to-64-bit load, or 16-bit store */
         struct {
            Bool isLoad;
            HReg rD;
            ARM64AMode* amode;
         } LdSt16;
         /* zx-8-to-64-bit load, or 8-bit store */
         struct {
            Bool isLoad;
            HReg rD;
            ARM64AMode* amode;
         } LdSt8;
         /* Update the guest PC value, then exit requesting to chain
            to it.  May be conditional.  Urr, use of Addr64 implicitly
            assumes that wordsize(guest) == wordsize(host). */
         struct {
            Addr64 dstGA;       /* next guest address */
            ARM64AMode* amPC;   /* amode in guest state for PC */
            ARM64CondCode cond; /* can be ARM64cc_AL */
            Bool toFastEP;      /* chain to the slow or fast point? */
         } XDirect;
         /* Boring transfer to a guest address not known at JIT time.
            Not chainable.  May be conditional. */
         struct {
            HReg dstGA;
            ARM64AMode* amPC;
            ARM64CondCode cond; /* can be ARM64cc_AL */
         } XIndir;
         /* Assisted transfer to a guest address, most general case.
            Not chainable.  May be conditional. */
         struct {
            HReg dstGA;
            ARM64AMode* amPC;
            ARM64CondCode cond; /* can be ARM64cc_AL */
            IRJumpKind jk;
         } XAssisted;
         /* CSEL: dst = if cond then argL else argR.  cond may be anything. */
         struct {
            HReg dst;
            HReg argL;
            HReg argR;
            ARM64CondCode cond;
         } CSel;
         /* Pseudo-insn.  Call target (an absolute address), on given
            condition (which could be ARM64cc_AL). */
         struct {
            RetLoc rloc; /* where the return value will be */
            Addr64 target;
            ARM64CondCode cond;
            Int nArgRegs; /* # regs carrying args: 0 .. 8 */
         } Call;
         /* move SP by small, signed constant */
         struct {
            Int simm; /* needs to be 0 % 16 and in the range -4095
                         .. 4095 inclusive */
         } AddToSP;
         /* move SP to integer register */
         struct {
            HReg dst;
         } FromSP;
         /* Integer multiply, with 3 variants:
              (PLAIN) lo64(64 * 64)
              (ZX)    hi64(64 *u 64)
              (SX)    hi64(64 *s 64)
         */
         struct {
            HReg dst;
            HReg argL;
            HReg argR;
            ARM64MulOp op;
         } Mul;
         /* LDXR{,H,B} x2, [x4] */
         struct {
            Int szB; /* 1, 2, 4 or 8 */
         } LdrEX;
         /* STXR{,H,B} w0, x2, [x4] */
         struct {
            Int szB; /* 1, 2, 4 or 8 */
         } StrEX;
         /* LDXP x2, x3, [x4].  This is 2x64-bit only. */
         struct {
         } LdrEXP;
         /* STXP w0, x2, x3, [x4].  This is 2x64-bit only. */
         struct {
         } StrEXP;
         /* x1 = CAS(x3(addr), x5(expected) -> x7(new)),
            and trashes x8
            where x1[8*szB-1 : 0] == x5[8*szB-1 : 0] indicates success,
                  x1[8*szB-1 : 0] != x5[8*szB-1 : 0] indicates failure.
            Uses x8 as scratch (but that's not allocatable).
            Hence: RD x3, x5, x7; WR x1

            loop:
              (szB=8)  mov   x8, x5
              (szB=4)  and   x8, x5, #0xFFFFFFFF
              (szB=2)  and   x8, x5, #0xFFFF
              (szB=1)  and   x8, x5, #0xFF
              -- x8 is correctly zero-extended expected value
              ldxr  x1, [x3]
              -- x1 is correctly zero-extended actual value
              cmp   x1, x8
              bne   after
              -- if branch taken, failure; x1[8*szB-1 : 0] holds old value
              -- attempt to store
              stxr  w8, x7, [x3]
              -- if store successful, x8==0
              -- if store failed, branch back and try again.
              cbnz  w8, loop
            after:
         */
         struct {
            Int szB; /* 1, 2, 4 or 8 */
         } CAS;
         /* Doubleword CAS, 2 x 32 bit or 2 x 64 bit
            x0(oldLSW),x1(oldMSW)
               = DCAS(x2(addr), x4(expectedLSW),x5(expectedMSW)
                      -> x6(newLSW),x7(newMSW))
            and trashes x8, x9 and x3
         */
         struct {
            Int szB; /* 4 or 8 */
         } CASP;
         /* Mem fence.  An insn which fences all loads and stores as
            much as possible before continuing.  On ARM64 we emit the
            sequence "dsb sy ; dmb sy ; isb sy", which is probably
            total nuclear overkill, but better safe than sorry. */
         struct {
         } MFence;
         /* A CLREX instruction. */
         struct {
         } ClrEX;
         /* --- INSTRUCTIONS INVOLVING VECTOR REGISTERS --- */
         /* ld/st to/from low 16 bits of vec reg, imm offset */
         struct {
            Bool isLoad;
            HReg hD;
            HReg rN;
            UInt uimm12; /* 0 .. 8190 inclusive, 0 % 2 */
         } VLdStH;
         /* ld/st to/from low 32 bits of vec reg, imm offset */
         struct {
            Bool isLoad;
            HReg sD;
            HReg rN;
            UInt uimm12; /* 0 .. 16380 inclusive, 0 % 4 */
         } VLdStS;
         /* ld/st to/from low 64 bits of vec reg, imm offset */
         struct {
            Bool isLoad;
            HReg dD;
            HReg rN;
            UInt uimm12; /* 0 .. 32760 inclusive, 0 % 8 */
         } VLdStD;
         /* ld/st to/from all 128 bits of vec reg, no offset */
         struct {
            Bool isLoad;
            HReg rQ; // data
            HReg rN; // address
         } VLdStQ;
         /* Scalar conversion of int to float. */
         struct {
            ARM64CvtOp how;
            HReg rD; // dst, a D or S register
            HReg rS; // src, a W or X register
         } VCvtI2F;
         /* Scalar conversion of float to int, w/ specified RM. */
         struct {
            ARM64CvtOp how;
            HReg rD; // dst, a W or X register
            HReg rS; // src, a D or S register
            UChar armRM; // ARM encoded RM:
                         // 00=nearest, 01=+inf, 10=-inf, 11=zero
         } VCvtF2I;
         /* Convert between 32-bit and 64-bit FP values (both ways). (FCVT) */
         struct {
            Bool sToD; /* True: F32->F64.  False: F64->F32 */
            HReg dst;
            HReg src;
         } VCvtSD;
         /* Convert between 16-bit and 32-bit FP values (both ways). (FCVT) */
         struct {
            Bool hToS; /* True: F16->F32.  False: F32->F16 */
            HReg dst;
            HReg src;
         } VCvtHS;
         /* Convert between 16-bit and 64-bit FP values (both ways). (FCVT) */
         struct {
            Bool hToD; /* True: F16->F64.  False: F64->F16 */
            HReg dst;
            HReg src;
         } VCvtHD;
         /* 64-bit FP unary */
         struct {
            ARM64FpUnaryOp op;
            HReg dst;
            HReg src;
         } VUnaryD;
         /* 32-bit FP unary */
         struct {
            ARM64FpUnaryOp op;
            HReg dst;
            HReg src;
         } VUnaryS;
         /* 16-bit FP unary */
         struct {
            ARM64FpUnaryOp op;
            HReg dst;
            HReg src;
         } VUnaryH;
         /* 64-bit FP binary arithmetic */
         struct {
            ARM64FpBinOp op;
            HReg dst;
            HReg argL;
            HReg argR;
         } VBinD;
         /* 32-bit FP binary arithmetic */
         struct {
            ARM64FpBinOp op;
            HReg dst;
            HReg argL;
            HReg argR;
         } VBinS;
         /* 16-bit FP binary arithmetic */
         struct {
            ARM64FpBinOp op;
            HReg dst;
            HReg argL;
            HReg argR;
         } VBinH;
         /* 64-bit FP ternary arithmetic */
         struct {
            ARM64FpTriOp op;
            HReg dst;
            HReg arg1;
            HReg arg2;
            HReg arg3;
         } VTriD;
         /* 32-bit FP ternary arithmetic */
         struct {
            ARM64FpTriOp op;
            HReg dst;
            HReg arg1;
            HReg arg2;
            HReg arg3;
         } VTriS;
         /* 64-bit FP compare */
         struct {
            HReg argL;
            HReg argR;
         } VCmpD;
         /* 32-bit FP compare */
         struct {
            HReg argL;
            HReg argR;
         } VCmpS;
         /* 16-bit FP compare */
         struct {
            HReg argL;
            HReg argR;
         } VCmpH;
         /* 32- or 64-bit FP conditional select */
         struct {
            HReg dst;
            HReg argL;
            HReg argR;
            ARM64CondCode cond;
            Bool isD;
         } VFCSel;
         /* Move a 32-bit value to/from the FPCR */
         struct {
            Bool toFPCR;
            HReg iReg;
         } FPCR;
         /* Move a 32-bit value to/from the FPSR */
         struct {
            Bool toFPSR;
            HReg iReg;
         } FPSR;
         /* binary vector operation on vector registers */
         struct {
            ARM64VecBinOp op;
            HReg dst;
            HReg argL;
            HReg argR;
         } VBinV;
         /* binary vector operation on vector registers.
            Dst reg is also a src. */
         struct {
            ARM64VecModifyOp op;
            HReg mod;
            HReg arg;
         } VModifyV;
         /* unary vector operation on vector registers */
         struct {
            ARM64VecUnaryOp op;
            HReg dst;
            HReg arg;
         } VUnaryV;
         /* vector narrowing, Q -> Q.  Result goes in the bottom half
            of dst and the top half is zeroed out.  Iow one of the
            XTN family. */
         struct {
            ARM64VecNarrowOp op;
            UInt dszBlg2; // 0: 16to8_x8  1: 32to16_x4  2: 64to32_x2
            HReg dst; // Q reg
            HReg src; // Q reg
         } VNarrowV;
         /* Vector shift by immediate.  For left shifts, |amt| must be
            >= 0 and < implied lane size of |op|.  For right shifts,
            |amt| must be > 0 and <= implied lane size of |op|.  Shifts
            beyond these ranges are not allowed. */
         struct {
            ARM64VecShiftImmOp op;
            HReg dst;
            HReg src;
            UInt amt;
         } VShiftImmV;
         struct {
            HReg dst;
            HReg srcLo;
            HReg srcHi;
            UInt amtB;
         } VExtV;
         struct {
            HReg rQ;
            UShort imm; /* Same 1-bit-per-byte encoding as IR */
         } VImmQ;
         struct {
            HReg rD;
            HReg rX;
         } VDfromX;
         struct {
            HReg rQ;
            HReg rXlo;
         } VQfromX;
         struct {
            HReg rQ;
            HReg rXhi;
            HReg rXlo;
         } VQfromXX;
         struct {
            HReg rX;
            HReg rQ;
            UInt laneNo; /* either 0 or 1 */
         } VXfromQ;
         struct {
            HReg rX;
            HReg rDorS;
            Bool fromD;
         } VXfromDorS;
         /* MOV dst, src -- reg-reg move for vector registers */
         struct {
            UInt szB; // 16=mov qD,qS;  8=mov dD,dS;  4=mov sD,sS
            HReg dst;
            HReg src;
         } VMov;
         struct {
            ARM64AMode* amCounter;
            ARM64AMode* amFailAddr;
         } EvCheck;
         struct {
            /* No fields.  The address of the counter to inc is
               installed later, post-translation, by patching it in,
               as it is not known at translation time. */
         } ProfInc;
      } ARM64in;
   }
   ARM64Instr;


extern ARM64Instr* ARM64Instr_Arith   ( HReg, HReg, ARM64RIA*, Bool isAdd );
extern ARM64Instr* ARM64Instr_Cmp     ( HReg, ARM64RIA*, Bool is64 );
extern ARM64Instr* ARM64Instr_Logic   ( HReg, HReg, ARM64RIL*, ARM64LogicOp );
extern ARM64Instr* ARM64Instr_RRS     ( HReg, HReg, HReg, ARM64ShiftOp,
                                        UChar amt, ARM64RRSOp mainOp );
extern ARM64Instr* ARM64Instr_Test    ( HReg, ARM64RIL* );
extern ARM64Instr* ARM64Instr_Shift   ( HReg, HReg, ARM64RI6*, ARM64ShiftOp );
extern ARM64Instr* ARM64Instr_Unary   ( HReg, HReg, ARM64UnaryOp );
extern ARM64Instr* ARM64Instr_Set64   ( HReg, ARM64CondCode );
extern ARM64Instr* ARM64Instr_MovI    ( HReg, HReg );
extern ARM64Instr* ARM64Instr_Imm64   ( HReg, ULong );
extern ARM64Instr* ARM64Instr_LdSt64  ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt32  ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt16  ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt8   ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_XDirect ( Addr64 dstGA, ARM64AMode* amPC,
                                        ARM64CondCode cond, Bool toFastEP );
extern ARM64Instr* ARM64Instr_XIndir  ( HReg dstGA, ARM64AMode* amPC,
                                        ARM64CondCode cond );
extern ARM64Instr* ARM64Instr_XAssisted ( HReg dstGA, ARM64AMode* amPC,
                                          ARM64CondCode cond, IRJumpKind jk );
extern ARM64Instr* ARM64Instr_CSel    ( HReg dst, HReg argL, HReg argR,
                                        ARM64CondCode cond );
extern ARM64Instr* ARM64Instr_Call    ( ARM64CondCode, Addr64, Int nArgRegs,
                                        RetLoc rloc );
extern ARM64Instr* ARM64Instr_AddToSP ( Int simm );
extern ARM64Instr* ARM64Instr_FromSP  ( HReg dst );
extern ARM64Instr* ARM64Instr_Mul     ( HReg dst, HReg argL, HReg argR,
                                        ARM64MulOp op );
extern ARM64Instr* ARM64Instr_LdrEX   ( Int szB );
extern ARM64Instr* ARM64Instr_StrEX   ( Int szB );
extern ARM64Instr* ARM64Instr_LdrEXP  ( void );
extern ARM64Instr* ARM64Instr_StrEXP  ( void );
extern ARM64Instr* ARM64Instr_CAS     ( Int szB );
extern ARM64Instr* ARM64Instr_CASP    ( Int szB );
extern ARM64Instr* ARM64Instr_MFence  ( void );
extern ARM64Instr* ARM64Instr_ClrEX   ( void );
extern ARM64Instr* ARM64Instr_VLdStH  ( Bool isLoad, HReg sD, HReg rN,
                                        UInt uimm12 /* 0 .. 8190, 0 % 2 */ );
extern ARM64Instr* ARM64Instr_VLdStS  ( Bool isLoad, HReg sD, HReg rN,
                                        UInt uimm12 /* 0 .. 16380, 0 % 4 */ );
extern ARM64Instr* ARM64Instr_VLdStD  ( Bool isLoad, HReg dD, HReg rN,
                                        UInt uimm12 /* 0 .. 32760, 0 % 8 */ );
extern ARM64Instr* ARM64Instr_VLdStQ  ( Bool isLoad, HReg rQ, HReg rN );
extern ARM64Instr* ARM64Instr_VCvtI2F ( ARM64CvtOp how, HReg rD, HReg rS );
extern ARM64Instr* ARM64Instr_VCvtF2I ( ARM64CvtOp how, HReg rD, HReg rS,
                                        UChar armRM );
extern ARM64Instr* ARM64Instr_VCvtSD  ( Bool sToD, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VCvtHS  ( Bool hToS, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VCvtHD  ( Bool hToD, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VUnaryD ( ARM64FpUnaryOp op, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VUnaryS ( ARM64FpUnaryOp op, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VUnaryH ( ARM64FpUnaryOp op, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VBinD   ( ARM64FpBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VBinS   ( ARM64FpBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VBinH   ( ARM64FpBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VTriD   ( ARM64FpTriOp op, HReg dst,
                                        HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VTriS   ( ARM64FpTriOp op, HReg dst,
                                        HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VCmpD   ( HReg argL, HReg argR );
extern ARM64Instr* ARM64Instr_VCmpS   ( HReg argL, HReg argR );
extern ARM64Instr* ARM64Instr_VCmpH   ( HReg argL, HReg argR );
extern ARM64Instr* ARM64Instr_VFCSel  ( HReg dst, HReg argL, HReg argR,
                                        ARM64CondCode cond, Bool isD );
extern ARM64Instr* ARM64Instr_FPCR    ( Bool toFPCR, HReg iReg );
extern ARM64Instr* ARM64Instr_FPSR    ( Bool toFPSR, HReg iReg );
extern ARM64Instr* ARM64Instr_VBinV   ( ARM64VecBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VModifyV ( ARM64VecModifyOp, HReg, HReg );
extern ARM64Instr* ARM64Instr_VUnaryV ( ARM64VecUnaryOp op, HReg, HReg );
extern ARM64Instr* ARM64Instr_VNarrowV ( ARM64VecNarrowOp op, UInt dszBlg2,
                                         HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VShiftImmV ( ARM64VecShiftImmOp op,
                                           HReg dst, HReg src, UInt amt );
extern ARM64Instr* ARM64Instr_VExtV   ( HReg dst,
                                        HReg srcLo, HReg srcHi, UInt amtB );
extern ARM64Instr* ARM64Instr_VImmQ   ( HReg, UShort );
extern ARM64Instr* ARM64Instr_VDfromX ( HReg rD, HReg rX );
extern ARM64Instr* ARM64Instr_VQfromX ( HReg rQ, HReg rXlo );
extern ARM64Instr* ARM64Instr_VQfromXX ( HReg rQ, HReg rXhi, HReg rXlo );
extern ARM64Instr* ARM64Instr_VXfromQ ( HReg rX, HReg rQ, UInt laneNo );
extern ARM64Instr* ARM64Instr_VXfromDorS ( HReg rX, HReg rDorS, Bool fromD );
extern ARM64Instr* ARM64Instr_VMov    ( UInt szB, HReg dst, HReg src );

extern ARM64Instr* ARM64Instr_EvCheck ( ARM64AMode* amCounter,
                                        ARM64AMode* amFailAddr );
extern ARM64Instr* ARM64Instr_ProfInc ( void );
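
/* A purely illustrative sketch of how the constructors above combine:
   "x0 = x1 + x2" and "x0 = x1 + #42" (isAdd=True selects ADD rather
   than SUB):

      ARM64Instr* i1 = ARM64Instr_Arith ( hregARM64_X0(), hregARM64_X1(),
                                          ARM64RIA_R(hregARM64_X2()),
                                          True/*isAdd*/ );
      ARM64Instr* i2 = ARM64Instr_Arith ( hregARM64_X0(), hregARM64_X1(),
                                          ARM64RIA_I12(42, 0),
                                          True/*isAdd*/ );
*/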

extern void ppARM64Instr ( const ARM64Instr* );


/* Some functions that insulate the register allocator from details
   of the underlying instruction set. */
extern void getRegUsage_ARM64Instr ( HRegUsage*, const ARM64Instr*, Bool );
extern void mapRegs_ARM64Instr     ( HRegRemap*, ARM64Instr*, Bool );
extern Int  emit_ARM64Instr        ( /*MB_MOD*/Bool* is_profInc,
                                     UChar* buf, Int nbuf, const ARM64Instr* i,
                                     Bool mode64,
                                     VexEndness endness_host,
                                     const void* disp_cp_chain_me_to_slowEP,
                                     const void* disp_cp_chain_me_to_fastEP,
                                     const void* disp_cp_xindir,
                                     const void* disp_cp_xassisted );

extern void genSpill_ARM64  ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );
extern void genReload_ARM64 ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );
extern ARM64Instr* genMove_ARM64 ( HReg from, HReg to, Bool );

extern const RRegUniverse* getRRegUniverse_ARM64 ( void );

extern HInstrArray* iselSB_ARM64 ( const IRSB*,
                                   VexArch,
                                   const VexArchInfo*,
                                   const VexAbiInfo*,
                                   Int offs_Host_EvC_Counter,
                                   Int offs_Host_EvC_FailAddr,
                                   Bool chainingAllowed,
                                   Bool addProfInc,
                                   Addr max_ga );

/* How big is an event check?  This is kind of a kludge because it
   depends on the offsets of host_EvC_FAILADDR and
   host_EvC_COUNTER. */
extern Int evCheckSzB_ARM64 ( void );

/* Perform a chaining and unchaining of an XDirect jump. */
extern VexInvalRange chainXDirect_ARM64 ( VexEndness endness_host,
                                          void* place_to_chain,
                                          const void* disp_cp_chain_me_EXPECTED,
                                          const void* place_to_jump_to );

extern VexInvalRange unchainXDirect_ARM64 ( VexEndness endness_host,
                                            void* place_to_unchain,
                                            const void* place_to_jump_to_EXPECTED,
                                            const void* disp_cp_chain_me );

/* Patch the counter location into an existing ProfInc point. */
extern VexInvalRange patchProfInc_ARM64 ( VexEndness endness_host,
                                          void* place_to_patch,
                                          const ULong* location_of_counter );


#endif /* ndef __VEX_HOST_ARM64_DEFS_H */

/*---------------------------------------------------------------*/
/*--- end                                   host_arm64_defs.h ---*/
/*---------------------------------------------------------------*/