[qemu/kevin.git] / tcg/ia64/tcg-target.c
blob 6bc9924641846b25c12db2a742e508a391986c2a
1 /*
2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2009-2010 Aurelien Jarno <aurelien@aurel32.net>
5 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
7 * Permission is hereby granted, free of charge, to any person obtaining a copy
8 * of this software and associated documentation files (the "Software"), to deal
9 * in the Software without restriction, including without limitation the rights
10 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 * copies of the Software, and to permit persons to whom the Software is
12 * furnished to do so, subject to the following conditions:
14 * The above copyright notice and this permission notice shall be included in
15 * all copies or substantial portions of the Software.
17 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
20 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 * THE SOFTWARE.
27 * Register definitions
30 #ifndef NDEBUG
31 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
32 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
33 "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
34 "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
35 "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
36 "r32", "r33", "r34", "r35", "r36", "r37", "r38", "r39",
37 "r40", "r41", "r42", "r43", "r44", "r45", "r46", "r47",
38 "r48", "r49", "r50", "r51", "r52", "r53", "r54", "r55",
39 "r56", "r57", "r58", "r59", "r60", "r61", "r62", "r63",
41 #endif
43 #ifdef CONFIG_USE_GUEST_BASE
44 #define TCG_GUEST_BASE_REG TCG_REG_R55
45 #else
46 #define TCG_GUEST_BASE_REG TCG_REG_R0
47 #endif
48 #ifndef GUEST_BASE
49 #define GUEST_BASE 0
50 #endif
52 /* Branch registers */
53 enum {
54 TCG_REG_B0 = 0,
55 TCG_REG_B1,
56 TCG_REG_B2,
57 TCG_REG_B3,
58 TCG_REG_B4,
59 TCG_REG_B5,
60 TCG_REG_B6,
61 TCG_REG_B7,
64 /* Floating point registers */
65 enum {
66 TCG_REG_F0 = 0,
67 TCG_REG_F1,
68 TCG_REG_F2,
69 TCG_REG_F3,
70 TCG_REG_F4,
71 TCG_REG_F5,
72 TCG_REG_F6,
73 TCG_REG_F7,
74 TCG_REG_F8,
75 TCG_REG_F9,
76 TCG_REG_F10,
77 TCG_REG_F11,
78 TCG_REG_F12,
79 TCG_REG_F13,
80 TCG_REG_F14,
81 TCG_REG_F15,
84 /* Predicate registers */
85 enum {
86 TCG_REG_P0 = 0,
87 TCG_REG_P1,
88 TCG_REG_P2,
89 TCG_REG_P3,
90 TCG_REG_P4,
91 TCG_REG_P5,
92 TCG_REG_P6,
93 TCG_REG_P7,
94 TCG_REG_P8,
95 TCG_REG_P9,
96 TCG_REG_P10,
97 TCG_REG_P11,
98 TCG_REG_P12,
99 TCG_REG_P13,
100 TCG_REG_P14,
101 TCG_REG_P15,
104 /* Application registers */
105 enum {
106 TCG_REG_PFS = 64,
109 static const int tcg_target_reg_alloc_order[] = {
110 TCG_REG_R35,
111 TCG_REG_R36,
112 TCG_REG_R37,
113 TCG_REG_R38,
114 TCG_REG_R39,
115 TCG_REG_R40,
116 TCG_REG_R41,
117 TCG_REG_R42,
118 TCG_REG_R43,
119 TCG_REG_R44,
120 TCG_REG_R45,
121 TCG_REG_R46,
122 TCG_REG_R47,
123 TCG_REG_R48,
124 TCG_REG_R49,
125 TCG_REG_R50,
126 TCG_REG_R51,
127 TCG_REG_R52,
128 TCG_REG_R53,
129 TCG_REG_R54,
130 TCG_REG_R55,
131 TCG_REG_R14,
132 TCG_REG_R15,
133 TCG_REG_R16,
134 TCG_REG_R17,
135 TCG_REG_R18,
136 TCG_REG_R19,
137 TCG_REG_R20,
138 TCG_REG_R21,
139 TCG_REG_R22,
140 TCG_REG_R23,
141 TCG_REG_R24,
142 TCG_REG_R25,
143 TCG_REG_R26,
144 TCG_REG_R27,
145 TCG_REG_R28,
146 TCG_REG_R29,
147 TCG_REG_R30,
148 TCG_REG_R31,
149 TCG_REG_R56,
150 TCG_REG_R57,
151 TCG_REG_R58,
152 TCG_REG_R59,
153 TCG_REG_R60,
154 TCG_REG_R61,
155 TCG_REG_R62,
156 TCG_REG_R63,
157 TCG_REG_R8,
158 TCG_REG_R9,
159 TCG_REG_R10,
160 TCG_REG_R11
163 static const int tcg_target_call_iarg_regs[8] = {
164 TCG_REG_R56,
165 TCG_REG_R57,
166 TCG_REG_R58,
167 TCG_REG_R59,
168 TCG_REG_R60,
169 TCG_REG_R61,
170 TCG_REG_R62,
171 TCG_REG_R63,
174 static const int tcg_target_call_oarg_regs[] = {
175 TCG_REG_R8
179 * opcode formation
182 /* bundle templates: stops (double bar in the IA64 manual) are marked with
183 an uppercase letter. */
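/* The values below are the 5-bit template numbers from the IA-64 bundle
   format table; the uppercase letter in each name marks the slot that is
   followed by a stop (e.g. mII = 0x03 stops after both I slots, while
   Mmi = 0x0a stops after the first M slot). */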
184 enum {
185 mii = 0x00,
186 miI = 0x01,
187 mIi = 0x02,
188 mII = 0x03,
189 mlx = 0x04,
190 mLX = 0x05,
191 mmi = 0x08,
192 mmI = 0x09,
193 Mmi = 0x0a,
194 MmI = 0x0b,
195 mfi = 0x0c,
196 mfI = 0x0d,
197 mmf = 0x0e,
198 mmF = 0x0f,
199 mib = 0x10,
200 miB = 0x11,
201 mbb = 0x12,
202 mbB = 0x13,
203 bbb = 0x16,
204 bbB = 0x17,
205 mmb = 0x18,
206 mmB = 0x19,
207 mfb = 0x1c,
208 mfB = 0x1d,
211 enum {
212 OPC_ADD_A1 = 0x10000000000ull,
213 OPC_AND_A1 = 0x10060000000ull,
214 OPC_AND_A3 = 0x10160000000ull,
215 OPC_ANDCM_A1 = 0x10068000000ull,
216 OPC_ANDCM_A3 = 0x10168000000ull,
217 OPC_ADDS_A4 = 0x10800000000ull,
218 OPC_ADDL_A5 = 0x12000000000ull,
219 OPC_ALLOC_M34 = 0x02c00000000ull,
220 OPC_BR_DPTK_FEW_B1 = 0x08400000000ull,
221 OPC_BR_SPTK_MANY_B1 = 0x08000001000ull,
222 OPC_BR_CALL_SPNT_FEW_B3 = 0x0a200000000ull,
223 OPC_BR_SPTK_MANY_B4 = 0x00100001000ull,
224 OPC_BR_CALL_SPTK_MANY_B5 = 0x02100001000ull,
225 OPC_BR_RET_SPTK_MANY_B4 = 0x00108001100ull,
226 OPC_BRL_SPTK_MANY_X3 = 0x18000001000ull,
227 OPC_BRL_CALL_SPNT_MANY_X4 = 0x1a200001000ull,
228 OPC_BRL_CALL_SPTK_MANY_X4 = 0x1a000001000ull,
229 OPC_CMP_LT_A6 = 0x18000000000ull,
230 OPC_CMP_LTU_A6 = 0x1a000000000ull,
231 OPC_CMP_EQ_A6 = 0x1c000000000ull,
232 OPC_CMP4_LT_A6 = 0x18400000000ull,
233 OPC_CMP4_LTU_A6 = 0x1a400000000ull,
234 OPC_CMP4_EQ_A6 = 0x1c400000000ull,
235 OPC_DEP_I14 = 0x0ae00000000ull,
236 OPC_DEP_I15 = 0x08000000000ull,
237 OPC_DEP_Z_I12 = 0x0a600000000ull,
238 OPC_EXTR_I11 = 0x0a400002000ull,
239 OPC_EXTR_U_I11 = 0x0a400000000ull,
240 OPC_FCVT_FX_TRUNC_S1_F10 = 0x004d0000000ull,
241 OPC_FCVT_FXU_TRUNC_S1_F10 = 0x004d8000000ull,
242 OPC_FCVT_XF_F11 = 0x000e0000000ull,
243 OPC_FMA_S1_F1 = 0x10400000000ull,
244 OPC_FNMA_S1_F1 = 0x18400000000ull,
245 OPC_FRCPA_S1_F6 = 0x00600000000ull,
246 OPC_GETF_SIG_M19 = 0x08708000000ull,
247 OPC_LD1_M1 = 0x08000000000ull,
248 OPC_LD1_M3 = 0x0a000000000ull,
249 OPC_LD2_M1 = 0x08040000000ull,
250 OPC_LD2_M3 = 0x0a040000000ull,
251 OPC_LD4_M1 = 0x08080000000ull,
252 OPC_LD4_M3 = 0x0a080000000ull,
253 OPC_LD8_M1 = 0x080c0000000ull,
254 OPC_LD8_M3 = 0x0a0c0000000ull,
255 OPC_MUX1_I3 = 0x0eca0000000ull,
256 OPC_NOP_B9 = 0x04008000000ull,
257 OPC_NOP_F16 = 0x00008000000ull,
258 OPC_NOP_I18 = 0x00008000000ull,
259 OPC_NOP_M48 = 0x00008000000ull,
260 OPC_MOV_I21 = 0x00e00100000ull,
261 OPC_MOV_RET_I21 = 0x00e00500000ull,
262 OPC_MOV_I22 = 0x00188000000ull,
263 OPC_MOV_I_I26 = 0x00150000000ull,
264 OPC_MOVL_X2 = 0x0c000000000ull,
265 OPC_OR_A1 = 0x10070000000ull,
266 OPC_OR_A3 = 0x10170000000ull,
267 OPC_SETF_EXP_M18 = 0x0c748000000ull,
268 OPC_SETF_SIG_M18 = 0x0c708000000ull,
269 OPC_SHL_I7 = 0x0f240000000ull,
270 OPC_SHR_I5 = 0x0f220000000ull,
271 OPC_SHR_U_I5 = 0x0f200000000ull,
272 OPC_SHRP_I10 = 0x0ac00000000ull,
273 OPC_SXT1_I29 = 0x000a0000000ull,
274 OPC_SXT2_I29 = 0x000a8000000ull,
275 OPC_SXT4_I29 = 0x000b0000000ull,
276 OPC_ST1_M4 = 0x08c00000000ull,
277 OPC_ST2_M4 = 0x08c40000000ull,
278 OPC_ST4_M4 = 0x08c80000000ull,
279 OPC_ST8_M4 = 0x08cc0000000ull,
280 OPC_SUB_A1 = 0x10028000000ull,
281 OPC_SUB_A3 = 0x10128000000ull,
282 OPC_UNPACK4_L_I2 = 0x0f860000000ull,
283 OPC_XMA_L_F2 = 0x1d000000000ull,
284 OPC_XOR_A1 = 0x10078000000ull,
285 OPC_XOR_A3 = 0x10178000000ull,
286 OPC_ZXT1_I29 = 0x00080000000ull,
287 OPC_ZXT2_I29 = 0x00088000000ull,
288 OPC_ZXT4_I29 = 0x00090000000ull,
290 INSN_NOP_M = OPC_NOP_M48, /* nop.m 0 */
291 INSN_NOP_I = OPC_NOP_I18, /* nop.i 0 */
294 static inline uint64_t tcg_opc_a1(int qp, uint64_t opc, int r1,
295 int r2, int r3)
297 return opc
298 | ((r3 & 0x7f) << 20)
299 | ((r2 & 0x7f) << 13)
300 | ((r1 & 0x7f) << 6)
301 | (qp & 0x3f);
304 static inline uint64_t tcg_opc_a3(int qp, uint64_t opc, int r1,
305 uint64_t imm, int r3)
307 return opc
308 | ((imm & 0x80) << 29) /* s */
309 | ((imm & 0x7f) << 13) /* imm7b */
310 | ((r3 & 0x7f) << 20)
311 | ((r1 & 0x7f) << 6)
312 | (qp & 0x3f);
315 static inline uint64_t tcg_opc_a4(int qp, uint64_t opc, int r1,
316 uint64_t imm, int r3)
318 return opc
319 | ((imm & 0x2000) << 23) /* s */
320 | ((imm & 0x1f80) << 20) /* imm6d */
321 | ((imm & 0x007f) << 13) /* imm7b */
322 | ((r3 & 0x7f) << 20)
323 | ((r1 & 0x7f) << 6)
324 | (qp & 0x3f);
327 static inline uint64_t tcg_opc_a5(int qp, uint64_t opc, int r1,
328 uint64_t imm, int r3)
330 return opc
331 | ((imm & 0x200000) << 15) /* s */
332 | ((imm & 0x1f0000) << 6) /* imm5c */
333 | ((imm & 0x00ff80) << 20) /* imm9d */
334 | ((imm & 0x00007f) << 13) /* imm7b */
335 | ((r3 & 0x03) << 20)
336 | ((r1 & 0x7f) << 6)
337 | (qp & 0x3f);
340 static inline uint64_t tcg_opc_a6(int qp, uint64_t opc, int p1,
341 int p2, int r2, int r3)
343 return opc
344 | ((p2 & 0x3f) << 27)
345 | ((r3 & 0x7f) << 20)
346 | ((r2 & 0x7f) << 13)
347 | ((p1 & 0x3f) << 6)
348 | (qp & 0x3f);
351 static inline uint64_t tcg_opc_b1(int qp, uint64_t opc, uint64_t imm)
353 return opc
354 | ((imm & 0x100000) << 16) /* s */
355 | ((imm & 0x0fffff) << 13) /* imm20b */
356 | (qp & 0x3f);
359 static inline uint64_t tcg_opc_b3(int qp, uint64_t opc, int b1, uint64_t imm)
361 return opc
362 | ((imm & 0x100000) << 16) /* s */
363 | ((imm & 0x0fffff) << 13) /* imm20b */
364 | ((b1 & 0x7) << 6)
365 | (qp & 0x3f);
368 static inline uint64_t tcg_opc_b4(int qp, uint64_t opc, int b2)
370 return opc
371 | ((b2 & 0x7) << 13)
372 | (qp & 0x3f);
375 static inline uint64_t tcg_opc_b5(int qp, uint64_t opc, int b1, int b2)
377 return opc
378 | ((b2 & 0x7) << 13)
379 | ((b1 & 0x7) << 6)
380 | (qp & 0x3f);
384 static inline uint64_t tcg_opc_b9(int qp, uint64_t opc, uint64_t imm)
386 return opc
387 | ((imm & 0x100000) << 16) /* i */
388 | ((imm & 0x0fffff) << 6) /* imm20a */
389 | (qp & 0x3f);
392 static inline uint64_t tcg_opc_f1(int qp, uint64_t opc, int f1,
393 int f3, int f4, int f2)
395 return opc
396 | ((f4 & 0x7f) << 27)
397 | ((f3 & 0x7f) << 20)
398 | ((f2 & 0x7f) << 13)
399 | ((f1 & 0x7f) << 6)
400 | (qp & 0x3f);
403 static inline uint64_t tcg_opc_f2(int qp, uint64_t opc, int f1,
404 int f3, int f4, int f2)
406 return opc
407 | ((f4 & 0x7f) << 27)
408 | ((f3 & 0x7f) << 20)
409 | ((f2 & 0x7f) << 13)
410 | ((f1 & 0x7f) << 6)
411 | (qp & 0x3f);
414 static inline uint64_t tcg_opc_f6(int qp, uint64_t opc, int f1,
415 int p2, int f2, int f3)
417 return opc
418 | ((p2 & 0x3f) << 27)
419 | ((f3 & 0x7f) << 20)
420 | ((f2 & 0x7f) << 13)
421 | ((f1 & 0x7f) << 6)
422 | (qp & 0x3f);
425 static inline uint64_t tcg_opc_f10(int qp, uint64_t opc, int f1, int f2)
427 return opc
428 | ((f2 & 0x7f) << 13)
429 | ((f1 & 0x7f) << 6)
430 | (qp & 0x3f);
433 static inline uint64_t tcg_opc_f11(int qp, uint64_t opc, int f1, int f2)
435 return opc
436 | ((f2 & 0x7f) << 13)
437 | ((f1 & 0x7f) << 6)
438 | (qp & 0x3f);
441 static inline uint64_t tcg_opc_f16(int qp, uint64_t opc, uint64_t imm)
443 return opc
444 | ((imm & 0x100000) << 16) /* i */
445 | ((imm & 0x0fffff) << 6) /* imm20a */
446 | (qp & 0x3f);
449 static inline uint64_t tcg_opc_i2(int qp, uint64_t opc, int r1,
450 int r2, int r3)
452 return opc
453 | ((r3 & 0x7f) << 20)
454 | ((r2 & 0x7f) << 13)
455 | ((r1 & 0x7f) << 6)
456 | (qp & 0x3f);
459 static inline uint64_t tcg_opc_i3(int qp, uint64_t opc, int r1,
460 int r2, int mbtype)
462 return opc
463 | ((mbtype & 0x0f) << 20)
464 | ((r2 & 0x7f) << 13)
465 | ((r1 & 0x7f) << 6)
466 | (qp & 0x3f);
469 static inline uint64_t tcg_opc_i5(int qp, uint64_t opc, int r1,
470 int r3, int r2)
472 return opc
473 | ((r3 & 0x7f) << 20)
474 | ((r2 & 0x7f) << 13)
475 | ((r1 & 0x7f) << 6)
476 | (qp & 0x3f);
479 static inline uint64_t tcg_opc_i7(int qp, uint64_t opc, int r1,
480 int r2, int r3)
482 return opc
483 | ((r3 & 0x7f) << 20)
484 | ((r2 & 0x7f) << 13)
485 | ((r1 & 0x7f) << 6)
486 | (qp & 0x3f);
489 static inline uint64_t tcg_opc_i10(int qp, uint64_t opc, int r1,
490 int r2, int r3, uint64_t count)
492 return opc
493 | ((count & 0x3f) << 27)
494 | ((r3 & 0x7f) << 20)
495 | ((r2 & 0x7f) << 13)
496 | ((r1 & 0x7f) << 6)
497 | (qp & 0x3f);
500 static inline uint64_t tcg_opc_i11(int qp, uint64_t opc, int r1,
501 int r3, uint64_t pos, uint64_t len)
503 return opc
504 | ((len & 0x3f) << 27)
505 | ((r3 & 0x7f) << 20)
506 | ((pos & 0x3f) << 14)
507 | ((r1 & 0x7f) << 6)
508 | (qp & 0x3f);
511 static inline uint64_t tcg_opc_i12(int qp, uint64_t opc, int r1,
512 int r2, uint64_t pos, uint64_t len)
514 return opc
515 | ((len & 0x3f) << 27)
516 | ((pos & 0x3f) << 20)
517 | ((r2 & 0x7f) << 13)
518 | ((r1 & 0x7f) << 6)
519 | (qp & 0x3f);
522 static inline uint64_t tcg_opc_i14(int qp, uint64_t opc, int r1, uint64_t imm,
523 int r3, uint64_t pos, uint64_t len)
525 return opc
526 | ((imm & 0x01) << 36)
527 | ((len & 0x3f) << 27)
528 | ((r3 & 0x7f) << 20)
529 | ((pos & 0x3f) << 14)
530 | ((r1 & 0x7f) << 6)
531 | (qp & 0x3f);
534 static inline uint64_t tcg_opc_i15(int qp, uint64_t opc, int r1, int r2,
535 int r3, uint64_t pos, uint64_t len)
537 return opc
538 | ((pos & 0x3f) << 31)
539 | ((len & 0x0f) << 27)
540 | ((r3 & 0x7f) << 20)
541 | ((r2 & 0x7f) << 13)
542 | ((r1 & 0x7f) << 6)
543 | (qp & 0x3f);
546 static inline uint64_t tcg_opc_i18(int qp, uint64_t opc, uint64_t imm)
548 return opc
549 | ((imm & 0x100000) << 16) /* i */
550 | ((imm & 0x0fffff) << 6) /* imm20a */
551 | (qp & 0x3f);
554 static inline uint64_t tcg_opc_i21(int qp, uint64_t opc, int b1,
555 int r2, uint64_t imm)
557 return opc
558 | ((imm & 0x1ff) << 24)
559 | ((r2 & 0x7f) << 13)
560 | ((b1 & 0x7) << 6)
561 | (qp & 0x3f);
564 static inline uint64_t tcg_opc_i22(int qp, uint64_t opc, int r1, int b2)
566 return opc
567 | ((b2 & 0x7) << 13)
568 | ((r1 & 0x7f) << 6)
569 | (qp & 0x3f);
572 static inline uint64_t tcg_opc_i26(int qp, uint64_t opc, int ar3, int r2)
574 return opc
575 | ((ar3 & 0x7f) << 20)
576 | ((r2 & 0x7f) << 13)
577 | (qp & 0x3f);
580 static inline uint64_t tcg_opc_i29(int qp, uint64_t opc, int r1, int r3)
582 return opc
583 | ((r3 & 0x7f) << 20)
584 | ((r1 & 0x7f) << 6)
585 | (qp & 0x3f);
588 static inline uint64_t tcg_opc_l2(uint64_t imm)
590 return (imm & 0x7fffffffffc00000ull) >> 22;
593 static inline uint64_t tcg_opc_l3(uint64_t imm)
595 return (imm & 0x07fffffffff00000ull) >> 18;
598 #define tcg_opc_l4 tcg_opc_l3
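/* A movl (X2) immediate is 64 bits wide and a brl (X3/X4) target is a
   60-bit displacement; neither fits in one 41-bit slot.  tcg_opc_l2/l3
   produce the portion that lives in the L slot, and tcg_opc_x2/x3/x4
   below pack the remaining bits into the X slot. */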
600 static inline uint64_t tcg_opc_m1(int qp, uint64_t opc, int r1, int r3)
602 return opc
603 | ((r3 & 0x7f) << 20)
604 | ((r1 & 0x7f) << 6)
605 | (qp & 0x3f);
608 static inline uint64_t tcg_opc_m3(int qp, uint64_t opc, int r1,
609 int r3, uint64_t imm)
611 return opc
612 | ((imm & 0x100) << 28) /* s */
613 | ((imm & 0x080) << 20) /* i */
614 | ((imm & 0x07f) << 13) /* imm7b */
615 | ((r3 & 0x7f) << 20)
616 | ((r1 & 0x7f) << 6)
617 | (qp & 0x3f);
620 static inline uint64_t tcg_opc_m4(int qp, uint64_t opc, int r2, int r3)
622 return opc
623 | ((r3 & 0x7f) << 20)
624 | ((r2 & 0x7f) << 13)
625 | (qp & 0x3f);
628 static inline uint64_t tcg_opc_m18(int qp, uint64_t opc, int f1, int r2)
630 return opc
631 | ((r2 & 0x7f) << 13)
632 | ((f1 & 0x7f) << 6)
633 | (qp & 0x3f);
636 static inline uint64_t tcg_opc_m19(int qp, uint64_t opc, int r1, int f2)
638 return opc
639 | ((f2 & 0x7f) << 13)
640 | ((r1 & 0x7f) << 6)
641 | (qp & 0x3f);
644 static inline uint64_t tcg_opc_m34(int qp, uint64_t opc, int r1,
645 int sof, int sol, int sor)
647 return opc
648 | ((sor & 0x0f) << 27)
649 | ((sol & 0x7f) << 20)
650 | ((sof & 0x7f) << 13)
651 | ((r1 & 0x7f) << 6)
652 | (qp & 0x3f);
655 static inline uint64_t tcg_opc_m48(int qp, uint64_t opc, uint64_t imm)
657 return opc
658 | ((imm & 0x100000) << 16) /* i */
659 | ((imm & 0x0fffff) << 6) /* imm20a */
660 | (qp & 0x3f);
663 static inline uint64_t tcg_opc_x2(int qp, uint64_t opc,
664 int r1, uint64_t imm)
666 return opc
667 | ((imm & 0x8000000000000000ull) >> 27) /* i */
668 | (imm & 0x0000000000200000ull) /* ic */
669 | ((imm & 0x00000000001f0000ull) << 6) /* imm5c */
670 | ((imm & 0x000000000000ff80ull) << 20) /* imm9d */
671 | ((imm & 0x000000000000007full) << 13) /* imm7b */
672 | ((r1 & 0x7f) << 6)
673 | (qp & 0x3f);
676 static inline uint64_t tcg_opc_x3(int qp, uint64_t opc, uint64_t imm)
678 return opc
679 | ((imm & 0x0800000000000000ull) >> 23) /* i */
680 | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
681 | (qp & 0x3f);
684 static inline uint64_t tcg_opc_x4(int qp, uint64_t opc, int b1, uint64_t imm)
686 return opc
687 | ((imm & 0x0800000000000000ull) >> 23) /* i */
688 | ((imm & 0x00000000000fffffull) << 13) /* imm20b */
689 | ((b1 & 0x7) << 6)
690 | (qp & 0x3f);
695  * Relocations.  Note that we only ever encode branches in slot 2.
698 static void reloc_pcrel21b_slot2(tcg_insn_unit *pc, tcg_insn_unit *target)
700 uint64_t imm = target - pc;
702 pc->hi = (pc->hi & 0xf700000fffffffffull)
703 | ((imm & 0x100000) << 39) /* s */
704 | ((imm & 0x0fffff) << 36); /* imm20b */
707 static uint64_t get_reloc_pcrel21b_slot2(tcg_insn_unit *pc)
709 int64_t high = pc->hi;
711 return ((high >> 39) & 0x100000) + /* s */
712 ((high >> 36) & 0x0fffff); /* imm20b */
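/* The pcrel21b displacement is counted in bundles (a tcg_insn_unit is one
   16-byte bundle).  Because branches are only emitted in slot 2, the s and
   imm20b fields always fall in the high word of the bundle, which is all
   the two helpers above need to read and rewrite. */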
715 static void patch_reloc(tcg_insn_unit *code_ptr, int type,
716 intptr_t value, intptr_t addend)
718 assert(addend == 0);
719 assert(type == R_IA64_PCREL21B);
720 reloc_pcrel21b_slot2(code_ptr, (tcg_insn_unit *)value);
724 * Constraints
727 /* parse target specific constraints */
728 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
730 const char *ct_str;
732 ct_str = *pct_str;
733 switch(ct_str[0]) {
734 case 'r':
735 ct->ct |= TCG_CT_REG;
736 tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
737 break;
738 case 'I':
739 ct->ct |= TCG_CT_CONST_S22;
740 break;
741 case 'S':
742 ct->ct |= TCG_CT_REG;
743 tcg_regset_set(ct->u.regs, 0xffffffffffffffffull);
744 #if defined(CONFIG_SOFTMMU)
745 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R56);
746 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R57);
747 tcg_regset_reset_reg(ct->u.regs, TCG_REG_R58);
748 #endif
749 break;
750 case 'Z':
751 /* We are cheating a bit here, using the fact that the register
752 r0 is also the register number 0. Hence there is no need
753 to check for const_args in each instruction. */
754 ct->ct |= TCG_CT_CONST_ZERO;
755 break;
756 default:
757 return -1;
759 ct_str++;
760 *pct_str = ct_str;
761 return 0;
764 /* test if a constant matches the constraint */
765 static inline int tcg_target_const_match(tcg_target_long val, TCGType type,
766 const TCGArgConstraint *arg_ct)
768 int ct;
769 ct = arg_ct->ct;
770 if (ct & TCG_CT_CONST)
771 return 1;
772 else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
773 return 1;
774 else if ((ct & TCG_CT_CONST_S22) && val == ((int32_t)val << 10) >> 10)
775 return 1;
776 else
777 return 0;
781 * Code generation
784 static tcg_insn_unit *tb_ret_addr;
786 static inline void tcg_out_bundle(TCGContext *s, int template,
787 uint64_t slot0, uint64_t slot1,
788 uint64_t slot2)
790 template &= 0x1f; /* 5 bits */
791 slot0 &= 0x1ffffffffffull; /* 41 bits */
792 slot1 &= 0x1ffffffffffull; /* 41 bits */
793 slot2 &= 0x1ffffffffffull; /* 41 bits */
795 *s->code_ptr++ = (tcg_insn_unit){
796 (slot1 << 46) | (slot0 << 5) | template,
797 (slot2 << 23) | (slot1 >> 18)
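/* Resulting 128-bit bundle layout, as implied by the shifts above:
     bits   0..4    template
     bits   5..45   slot 0
     bits  46..86   slot 1  (split across the two 64-bit words)
     bits  87..127  slot 2
   written out as the two 64-bit words of a tcg_insn_unit, low word first. */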
801 static inline uint64_t tcg_opc_mov_a(int qp, TCGReg dst, TCGReg src)
803 return tcg_opc_a4(qp, OPC_ADDS_A4, dst, 0, src);
806 static inline void tcg_out_mov(TCGContext *s, TCGType type,
807 TCGReg ret, TCGReg arg)
809 tcg_out_bundle(s, mmI,
810 INSN_NOP_M,
811 INSN_NOP_M,
812 tcg_opc_mov_a(TCG_REG_P0, ret, arg));
815 static inline uint64_t tcg_opc_movi_a(int qp, TCGReg dst, int64_t src)
817 assert(src == sextract64(src, 0, 22));
818 return tcg_opc_a5(qp, OPC_ADDL_A5, dst, src, TCG_REG_R0);
821 static inline void tcg_out_movi(TCGContext *s, TCGType type,
822 TCGReg reg, tcg_target_long arg)
824 tcg_out_bundle(s, mLX,
825 INSN_NOP_M,
826 tcg_opc_l2 (arg),
827 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, reg, arg));
830 static void tcg_out_br(TCGContext *s, int label_index)
832 TCGLabel *l = &s->labels[label_index];
833 uint64_t imm;
835     /* Take care here not to modify the branch target: read the existing
836        value and use it again.  This ensures that caches and memory are
837        kept coherent during retranslation. */
838 if (l->has_value) {
839 imm = l->u.value_ptr - s->code_ptr;
840 } else {
841 imm = get_reloc_pcrel21b_slot2(s->code_ptr);
842 tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, label_index, 0);
845 tcg_out_bundle(s, mmB,
846 INSN_NOP_M,
847 INSN_NOP_M,
848 tcg_opc_b1(TCG_REG_P0, OPC_BR_SPTK_MANY_B1, imm));
851 static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *desc)
853 uintptr_t func = desc->lo, gp = desc->hi, disp;
855 /* Look through the function descriptor. */
856 tcg_out_bundle(s, mlx,
857 INSN_NOP_M,
858 tcg_opc_l2 (gp),
859 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2, TCG_REG_R1, gp));
860 disp = (tcg_insn_unit *)func - s->code_ptr;
861 tcg_out_bundle(s, mLX,
862 INSN_NOP_M,
863 tcg_opc_l4 (disp),
864 tcg_opc_x4 (TCG_REG_P0, OPC_BRL_CALL_SPTK_MANY_X4,
865 TCG_REG_B0, disp));
868 static void tcg_out_exit_tb(TCGContext *s, tcg_target_long arg)
870 uint64_t imm, opc1;
872 /* At least arg == 0 is a common operation. */
873 if (arg == sextract64(arg, 0, 22)) {
874 opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R8, arg);
875 } else {
876 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R8, arg);
877 opc1 = INSN_NOP_M;
880 imm = tb_ret_addr - s->code_ptr;
882 tcg_out_bundle(s, mLX,
883 opc1,
884 tcg_opc_l3 (imm),
885 tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, imm));
888 static inline void tcg_out_goto_tb(TCGContext *s, TCGArg arg)
890 if (s->tb_jmp_offset) {
891 /* direct jump method */
892 tcg_abort();
893 } else {
894 /* indirect jump method */
895 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2,
896 (tcg_target_long)(s->tb_next + arg));
897 tcg_out_bundle(s, MmI,
898 tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1,
899 TCG_REG_R2, TCG_REG_R2),
900 INSN_NOP_M,
901 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6,
902 TCG_REG_R2, 0));
903 tcg_out_bundle(s, mmB,
904 INSN_NOP_M,
905 INSN_NOP_M,
906 tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4,
907 TCG_REG_B6));
909 s->tb_next_offset[arg] = tcg_current_code_size(s);
912 static inline void tcg_out_jmp(TCGContext *s, TCGArg addr)
914 tcg_out_bundle(s, mmI,
915 INSN_NOP_M,
916 INSN_NOP_M,
917 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21, TCG_REG_B6, addr, 0));
918 tcg_out_bundle(s, mmB,
919 INSN_NOP_M,
920 INSN_NOP_M,
921 tcg_opc_b4(TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
924 static inline void tcg_out_ld_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
925 TCGArg arg1, tcg_target_long arg2)
927 if (arg2 == ((int16_t)arg2 >> 2) << 2) {
928 tcg_out_bundle(s, MmI,
929 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
930 TCG_REG_R2, arg2, arg1),
931 tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
932 INSN_NOP_I);
933 } else {
934 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
935 tcg_out_bundle(s, MmI,
936 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
937 TCG_REG_R2, TCG_REG_R2, arg1),
938 tcg_opc_m1 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
939 INSN_NOP_I);
943 static inline void tcg_out_st_rel(TCGContext *s, uint64_t opc_m4, TCGArg arg,
944 TCGArg arg1, tcg_target_long arg2)
946 if (arg2 == ((int16_t)arg2 >> 2) << 2) {
947 tcg_out_bundle(s, MmI,
948 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4,
949 TCG_REG_R2, arg2, arg1),
950 tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
951 INSN_NOP_I);
952 } else {
953 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R2, arg2);
954 tcg_out_bundle(s, MmI,
955 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
956 TCG_REG_R2, TCG_REG_R2, arg1),
957 tcg_opc_m4 (TCG_REG_P0, opc_m4, arg, TCG_REG_R2),
958 INSN_NOP_I);
962 static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
963 TCGReg arg1, intptr_t arg2)
965 if (type == TCG_TYPE_I32) {
966 tcg_out_ld_rel(s, OPC_LD4_M1, arg, arg1, arg2);
967 } else {
968 tcg_out_ld_rel(s, OPC_LD8_M1, arg, arg1, arg2);
972 static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
973 TCGReg arg1, intptr_t arg2)
975 if (type == TCG_TYPE_I32) {
976 tcg_out_st_rel(s, OPC_ST4_M4, arg, arg1, arg2);
977 } else {
978 tcg_out_st_rel(s, OPC_ST8_M4, arg, arg1, arg2);
982 static inline void tcg_out_alu(TCGContext *s, uint64_t opc_a1, uint64_t opc_a3,
983 TCGReg ret, TCGArg arg1, int const_arg1,
984 TCGArg arg2, int const_arg2)
986 uint64_t opc1 = 0, opc2 = 0, opc3 = 0;
988 if (const_arg2 && arg2 != 0) {
989 opc2 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R3, arg2);
990 arg2 = TCG_REG_R3;
992 if (const_arg1 && arg1 != 0) {
993 if (opc_a3 && arg1 == (int8_t)arg1) {
994 opc3 = tcg_opc_a3(TCG_REG_P0, opc_a3, ret, arg1, arg2);
995 } else {
996 opc1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, arg1);
997 arg1 = TCG_REG_R2;
1000 if (opc3 == 0) {
1001 opc3 = tcg_opc_a1(TCG_REG_P0, opc_a1, ret, arg1, arg2);
1004 tcg_out_bundle(s, (opc1 || opc2 ? mII : miI),
1005 opc1 ? opc1 : INSN_NOP_M,
1006 opc2 ? opc2 : INSN_NOP_I,
1007 opc3);
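/* Selection logic above: the A3 form takes an 8-bit immediate in the
   first-operand position, so a small constant arg1 can be encoded directly;
   any other constant operand is first materialised into R2 (arg1) or
   R3 (arg2) with an addl, and the register-register A1 form is used. */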
1010 static inline void tcg_out_add(TCGContext *s, TCGReg ret, TCGReg arg1,
1011 TCGArg arg2, int const_arg2)
1013 if (const_arg2 && arg2 == sextract64(arg2, 0, 14)) {
1014 tcg_out_bundle(s, mmI,
1015 INSN_NOP_M,
1016 INSN_NOP_M,
1017 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, arg2, arg1));
1018 } else {
1019 tcg_out_alu(s, OPC_ADD_A1, 0, ret, arg1, 0, arg2, const_arg2);
1023 static inline void tcg_out_sub(TCGContext *s, TCGReg ret, TCGArg arg1,
1024 int const_arg1, TCGArg arg2, int const_arg2)
1026 if (!const_arg1 && const_arg2 && -arg2 == sextract64(-arg2, 0, 14)) {
1027 tcg_out_bundle(s, mmI,
1028 INSN_NOP_M,
1029 INSN_NOP_M,
1030 tcg_opc_a4(TCG_REG_P0, OPC_ADDS_A4, ret, -arg2, arg1));
1031 } else {
1032 tcg_out_alu(s, OPC_SUB_A1, OPC_SUB_A3, ret,
1033 arg1, const_arg1, arg2, const_arg2);
1037 static inline void tcg_out_eqv(TCGContext *s, TCGArg ret,
1038 TCGArg arg1, int const_arg1,
1039 TCGArg arg2, int const_arg2)
1041 tcg_out_bundle(s, mII,
1042 INSN_NOP_M,
1043 tcg_opc_a1 (TCG_REG_P0, OPC_XOR_A1, ret, arg1, arg2),
1044 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1047 static inline void tcg_out_nand(TCGContext *s, TCGArg ret,
1048 TCGArg arg1, int const_arg1,
1049 TCGArg arg2, int const_arg2)
1051 tcg_out_bundle(s, mII,
1052 INSN_NOP_M,
1053 tcg_opc_a1 (TCG_REG_P0, OPC_AND_A1, ret, arg1, arg2),
1054 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1057 static inline void tcg_out_nor(TCGContext *s, TCGArg ret,
1058 TCGArg arg1, int const_arg1,
1059 TCGArg arg2, int const_arg2)
1061 tcg_out_bundle(s, mII,
1062 INSN_NOP_M,
1063 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, arg2),
1064 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, ret, -1, ret));
1067 static inline void tcg_out_orc(TCGContext *s, TCGArg ret,
1068 TCGArg arg1, int const_arg1,
1069 TCGArg arg2, int const_arg2)
1071 tcg_out_bundle(s, mII,
1072 INSN_NOP_M,
1073 tcg_opc_a3 (TCG_REG_P0, OPC_ANDCM_A3, TCG_REG_R2, -1, arg2),
1074 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret, arg1, TCG_REG_R2));
1077 static inline void tcg_out_mul(TCGContext *s, TCGArg ret,
1078 TCGArg arg1, TCGArg arg2)
1080 tcg_out_bundle(s, mmI,
1081 tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F6, arg1),
1082 tcg_opc_m18(TCG_REG_P0, OPC_SETF_SIG_M18, TCG_REG_F7, arg2),
1083 INSN_NOP_I);
1084 tcg_out_bundle(s, mmF,
1085 INSN_NOP_M,
1086 INSN_NOP_M,
1087 tcg_opc_f2 (TCG_REG_P0, OPC_XMA_L_F2, TCG_REG_F6, TCG_REG_F6,
1088 TCG_REG_F7, TCG_REG_F0));
1089 tcg_out_bundle(s, miI,
1090 tcg_opc_m19(TCG_REG_P0, OPC_GETF_SIG_M19, ret, TCG_REG_F6),
1091 INSN_NOP_I,
1092 INSN_NOP_I);
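/* IA-64 has no integer multiply in the ALU: setf.sig moves the operands
   into FP registers, xma.l computes the low 64 bits of f6 * f7 + f0
   (f0 is hardwired to +0.0), and getf.sig moves the product back into an
   integer register. */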
1095 static inline void tcg_out_sar_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1096 TCGArg arg2, int const_arg2)
1098 if (const_arg2) {
1099 tcg_out_bundle(s, miI,
1100 INSN_NOP_M,
1101 INSN_NOP_I,
1102 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
1103 ret, arg1, arg2, 31 - arg2));
1104 } else {
1105 tcg_out_bundle(s, mII,
1106 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3,
1107 TCG_REG_R3, 0x1f, arg2),
1108 tcg_opc_i29(TCG_REG_P0, OPC_SXT4_I29, TCG_REG_R2, arg1),
1109 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret,
1110 TCG_REG_R2, TCG_REG_R3));
1114 static inline void tcg_out_sar_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1115 TCGArg arg2, int const_arg2)
1117 if (const_arg2) {
1118 tcg_out_bundle(s, miI,
1119 INSN_NOP_M,
1120 INSN_NOP_I,
1121 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_I11,
1122 ret, arg1, arg2, 63 - arg2));
1123 } else {
1124 tcg_out_bundle(s, miI,
1125 INSN_NOP_M,
1126 INSN_NOP_I,
1127 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_I5, ret, arg1, arg2));
1131 static inline void tcg_out_shl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1132 TCGArg arg2, int const_arg2)
1134 if (const_arg2) {
1135 tcg_out_bundle(s, miI,
1136 INSN_NOP_M,
1137 INSN_NOP_I,
1138 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
1139 arg1, 63 - arg2, 31 - arg2));
1140 } else {
1141 tcg_out_bundle(s, mII,
1142 INSN_NOP_M,
1143 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R2,
1144 0x1f, arg2),
1145 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
1146 arg1, TCG_REG_R2));
1150 static inline void tcg_out_shl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1151 TCGArg arg2, int const_arg2)
1153 if (const_arg2) {
1154 tcg_out_bundle(s, miI,
1155 INSN_NOP_M,
1156 INSN_NOP_I,
1157 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret,
1158 arg1, 63 - arg2, 63 - arg2));
1159 } else {
1160 tcg_out_bundle(s, miI,
1161 INSN_NOP_M,
1162 INSN_NOP_I,
1163 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, ret,
1164 arg1, arg2));
1168 static inline void tcg_out_shr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1169 TCGArg arg2, int const_arg2)
1171 if (const_arg2) {
1172 tcg_out_bundle(s, miI,
1173 INSN_NOP_M,
1174 INSN_NOP_I,
1175 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1176 arg1, arg2, 31 - arg2));
1177 } else {
1178 tcg_out_bundle(s, mII,
1179 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1180 0x1f, arg2),
1181 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29, TCG_REG_R2, arg1),
1182 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1183 TCG_REG_R2, TCG_REG_R3));
1187 static inline void tcg_out_shr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1188 TCGArg arg2, int const_arg2)
1190 if (const_arg2) {
1191 tcg_out_bundle(s, miI,
1192 INSN_NOP_M,
1193 INSN_NOP_I,
1194 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1195 arg1, arg2, 63 - arg2));
1196 } else {
1197 tcg_out_bundle(s, miI,
1198 INSN_NOP_M,
1199 INSN_NOP_I,
1200 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1201 arg1, arg2));
1205 static inline void tcg_out_rotl_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1206 TCGArg arg2, int const_arg2)
1208 if (const_arg2) {
1209 tcg_out_bundle(s, mII,
1210 INSN_NOP_M,
1211 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1212 TCG_REG_R2, arg1, arg1),
1213 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1214 TCG_REG_R2, 32 - arg2, 31));
1215 } else {
1216 tcg_out_bundle(s, miI,
1217 INSN_NOP_M,
1218 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1219 TCG_REG_R2, arg1, arg1),
1220 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1221 0x1f, arg2));
1222 tcg_out_bundle(s, mII,
1223 INSN_NOP_M,
1224 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R3,
1225 0x20, TCG_REG_R3),
1226 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1227 TCG_REG_R2, TCG_REG_R3));
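/* In the constant case, unpack4.l with both operands equal to arg1 in
   effect replicates the low 32 bits into both halves of R2, so a 32-bit
   extract starting at bit (32 - arg2) is the left-rotated value; the
   variable case shifts the replicated value right by (32 - count) instead. */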
1231 static inline void tcg_out_rotl_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1232 TCGArg arg2, int const_arg2)
1234 if (const_arg2) {
1235 tcg_out_bundle(s, miI,
1236 INSN_NOP_M,
1237 INSN_NOP_I,
1238 tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
1239 arg1, 0x40 - arg2));
1240 } else {
1241 tcg_out_bundle(s, mII,
1242 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
1243 0x40, arg2),
1244 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R3,
1245 arg1, arg2),
1246 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R2,
1247 arg1, TCG_REG_R2));
1248 tcg_out_bundle(s, miI,
1249 INSN_NOP_M,
1250 INSN_NOP_I,
1251 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
1252 TCG_REG_R2, TCG_REG_R3));
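/* shrp (shift right pair) extracts 64 bits from the 128-bit concatenation
   of its two sources; with both sources equal to arg1, shifting right by
   (64 - count) is exactly a left rotate.  The variable-count case combines
   shl and shr.u results with an or instead. */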
1256 static inline void tcg_out_rotr_i32(TCGContext *s, TCGArg ret, TCGArg arg1,
1257 TCGArg arg2, int const_arg2)
1259 if (const_arg2) {
1260 tcg_out_bundle(s, mII,
1261 INSN_NOP_M,
1262 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1263 TCG_REG_R2, arg1, arg1),
1264 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, ret,
1265 TCG_REG_R2, arg2, 31));
1266 } else {
1267 tcg_out_bundle(s, mII,
1268 tcg_opc_a3 (TCG_REG_P0, OPC_AND_A3, TCG_REG_R3,
1269 0x1f, arg2),
1270 tcg_opc_i2 (TCG_REG_P0, OPC_UNPACK4_L_I2,
1271 TCG_REG_R2, arg1, arg1),
1272 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, ret,
1273 TCG_REG_R2, TCG_REG_R3));
1277 static inline void tcg_out_rotr_i64(TCGContext *s, TCGArg ret, TCGArg arg1,
1278 TCGArg arg2, int const_arg2)
1280 if (const_arg2) {
1281 tcg_out_bundle(s, miI,
1282 INSN_NOP_M,
1283 INSN_NOP_I,
1284 tcg_opc_i10(TCG_REG_P0, OPC_SHRP_I10, ret, arg1,
1285 arg1, arg2));
1286 } else {
1287 tcg_out_bundle(s, mII,
1288 tcg_opc_a3 (TCG_REG_P0, OPC_SUB_A3, TCG_REG_R2,
1289 0x40, arg2),
1290 tcg_opc_i5 (TCG_REG_P0, OPC_SHR_U_I5, TCG_REG_R3,
1291 arg1, arg2),
1292 tcg_opc_i7 (TCG_REG_P0, OPC_SHL_I7, TCG_REG_R2,
1293 arg1, TCG_REG_R2));
1294 tcg_out_bundle(s, miI,
1295 INSN_NOP_M,
1296 INSN_NOP_I,
1297 tcg_opc_a1 (TCG_REG_P0, OPC_OR_A1, ret,
1298 TCG_REG_R2, TCG_REG_R3));
1302 static const uint64_t opc_ext_i29[8] = {
1303 OPC_ZXT1_I29, OPC_ZXT2_I29, OPC_ZXT4_I29, 0,
1304 OPC_SXT1_I29, OPC_SXT2_I29, OPC_SXT4_I29, 0
1307 static inline uint64_t tcg_opc_ext_i(int qp, TCGMemOp opc, TCGReg d, TCGReg s)
1309 if ((opc & MO_SIZE) == MO_64) {
1310 return tcg_opc_mov_a(qp, d, s);
1311 } else {
1312 return tcg_opc_i29(qp, opc_ext_i29[opc & MO_SSIZE], d, s);
1316 static inline void tcg_out_ext(TCGContext *s, uint64_t opc_i29,
1317 TCGArg ret, TCGArg arg)
1319 tcg_out_bundle(s, miI,
1320 INSN_NOP_M,
1321 INSN_NOP_I,
1322 tcg_opc_i29(TCG_REG_P0, opc_i29, ret, arg));
1325 static inline uint64_t tcg_opc_bswap64_i(int qp, TCGReg d, TCGReg s)
1327 return tcg_opc_i3(qp, OPC_MUX1_I3, d, s, 0xb);
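/* mux1 with mbtype 0xb is the @rev form, which reverses the eight bytes of
   the source register.  The 16- and 32-bit bswaps below first move the
   value into the top of the register with dep.z so that the reversed bytes
   land in the low bits. */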
1330 static inline void tcg_out_bswap16(TCGContext *s, TCGArg ret, TCGArg arg)
1332 tcg_out_bundle(s, mII,
1333 INSN_NOP_M,
1334 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 15, 15),
1335 tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
1338 static inline void tcg_out_bswap32(TCGContext *s, TCGArg ret, TCGArg arg)
1340 tcg_out_bundle(s, mII,
1341 INSN_NOP_M,
1342 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, ret, arg, 31, 31),
1343 tcg_opc_bswap64_i(TCG_REG_P0, ret, ret));
1346 static inline void tcg_out_bswap64(TCGContext *s, TCGArg ret, TCGArg arg)
1348 tcg_out_bundle(s, miI,
1349 INSN_NOP_M,
1350 INSN_NOP_I,
1351 tcg_opc_bswap64_i(TCG_REG_P0, ret, arg));
1354 static inline void tcg_out_deposit(TCGContext *s, TCGArg ret, TCGArg a1,
1355 TCGArg a2, int const_a2, int pos, int len)
1357 uint64_t i1 = 0, i2 = 0;
1358 int cpos = 63 - pos, lm1 = len - 1;
1360 if (const_a2) {
1361 /* Truncate the value of a constant a2 to the width of the field. */
1362 int mask = (1u << len) - 1;
1363 a2 &= mask;
1365 if (a2 == 0 || a2 == mask) {
1366 /* 1-bit signed constant inserted into register. */
1367 i2 = tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, ret, a2, a1, cpos, lm1);
1368 } else {
1369 /* Otherwise, load any constant into a temporary. Do this into
1370 the first I slot to help out with cross-unit delays. */
1371 i1 = tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, a2);
1372 a2 = TCG_REG_R2;
1375 if (i2 == 0) {
1376 i2 = tcg_opc_i15(TCG_REG_P0, OPC_DEP_I15, ret, a2, a1, cpos, lm1);
1378 tcg_out_bundle(s, (i1 ? mII : miI),
1379 INSN_NOP_M,
1380 i1 ? i1 : INSN_NOP_I,
1381 i2);
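/* As the cpos = 63 - pos computation above suggests, the dep forms encode
   the insertion point complemented against bit 63.  The I14 form only
   carries a 1-bit sign-extended immediate, so it is usable only when the
   deposited field is all zeros or all ones; any other constant goes through
   R2 and the I15 register form. */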
1384 static inline uint64_t tcg_opc_cmp_a(int qp, TCGCond cond, TCGArg arg1,
1385 TCGArg arg2, int cmp4)
1387 uint64_t opc_eq_a6, opc_lt_a6, opc_ltu_a6;
1389 if (cmp4) {
1390 opc_eq_a6 = OPC_CMP4_EQ_A6;
1391 opc_lt_a6 = OPC_CMP4_LT_A6;
1392 opc_ltu_a6 = OPC_CMP4_LTU_A6;
1393 } else {
1394 opc_eq_a6 = OPC_CMP_EQ_A6;
1395 opc_lt_a6 = OPC_CMP_LT_A6;
1396 opc_ltu_a6 = OPC_CMP_LTU_A6;
1399 switch (cond) {
1400 case TCG_COND_EQ:
1401 return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1402 case TCG_COND_NE:
1403 return tcg_opc_a6 (qp, opc_eq_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1404 case TCG_COND_LT:
1405 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1406 case TCG_COND_LTU:
1407 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg1, arg2);
1408 case TCG_COND_GE:
1409 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1410 case TCG_COND_GEU:
1411 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg1, arg2);
1412 case TCG_COND_LE:
1413 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
1414 case TCG_COND_LEU:
1415 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P7, TCG_REG_P6, arg2, arg1);
1416 case TCG_COND_GT:
1417 return tcg_opc_a6 (qp, opc_lt_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
1418 case TCG_COND_GTU:
1419 return tcg_opc_a6 (qp, opc_ltu_a6, TCG_REG_P6, TCG_REG_P7, arg2, arg1);
1420 default:
1421 tcg_abort();
1422 break;
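/* Only eq, lt and ltu exist as A6 compares: ne/ge/geu are formed by
   swapping the two predicate targets, gt/gtu by swapping the operands, and
   le/leu by swapping both, so the requested condition always ends up with
   p6 true and p7 false. */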
1426 static inline void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
1427 TCGReg arg2, int label_index, int cmp4)
1429 TCGLabel *l = &s->labels[label_index];
1430 uint64_t imm;
1432     /* Take care here not to modify the branch target: read the existing
1433        value and use it again.  This ensures that caches and memory are
1434        kept coherent during retranslation. */
1435 if (l->has_value) {
1436 imm = l->u.value_ptr - s->code_ptr;
1437 } else {
1438 imm = get_reloc_pcrel21b_slot2(s->code_ptr);
1439 tcg_out_reloc(s, s->code_ptr, R_IA64_PCREL21B, label_index, 0);
1442 tcg_out_bundle(s, miB,
1443 INSN_NOP_M,
1444 tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
1445 tcg_opc_b1(TCG_REG_P6, OPC_BR_DPTK_FEW_B1, imm));
1448 static inline void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGArg ret,
1449 TCGArg arg1, TCGArg arg2, int cmp4)
1451 tcg_out_bundle(s, MmI,
1452 tcg_opc_cmp_a(TCG_REG_P0, cond, arg1, arg2, cmp4),
1453 tcg_opc_movi_a(TCG_REG_P6, ret, 1),
1454 tcg_opc_movi_a(TCG_REG_P7, ret, 0));
1457 static inline void tcg_out_movcond(TCGContext *s, TCGCond cond, TCGArg ret,
1458 TCGArg c1, TCGArg c2,
1459 TCGArg v1, int const_v1,
1460 TCGArg v2, int const_v2, int cmp4)
1462 uint64_t opc1, opc2;
1464 if (const_v1) {
1465 opc1 = tcg_opc_movi_a(TCG_REG_P6, ret, v1);
1466 } else if (ret == v1) {
1467 opc1 = INSN_NOP_M;
1468 } else {
1469 opc1 = tcg_opc_mov_a(TCG_REG_P6, ret, v1);
1471 if (const_v2) {
1472 opc2 = tcg_opc_movi_a(TCG_REG_P7, ret, v2);
1473 } else if (ret == v2) {
1474 opc2 = INSN_NOP_I;
1475 } else {
1476 opc2 = tcg_opc_mov_a(TCG_REG_P7, ret, v2);
1479 tcg_out_bundle(s, MmI,
1480 tcg_opc_cmp_a(TCG_REG_P0, cond, c1, c2, cmp4),
1481 opc1,
1482 opc2);
1485 #if defined(CONFIG_SOFTMMU)
1486 /* We're expecting to use a signed 22-bit immediate add. */
1487 QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table[NB_MMU_MODES - 1][1])
1488 > 0x1fffff)
1490 /* Load and compare a TLB entry, and return the result in (p6, p7).
1491    R2 is loaded with the addend of the TLB entry.
1492    R57 is loaded with the address, zero extended on 32-bit targets.
1493 R1, R3 are clobbered, leaving R56 free for...
1494 BSWAP_1, BSWAP_2 and I-slot insns for swapping data for store. */
1495 static inline void tcg_out_qemu_tlb(TCGContext *s, TCGReg addr_reg,
1496 TCGMemOp s_bits, int off_rw, int off_add,
1497 uint64_t bswap1, uint64_t bswap2)
1500 .mii
1501 mov r2 = off_rw
1502 extr.u r3 = addr_reg, ... # extract tlb page
1503 zxt4 r57 = addr_reg # or mov for 64-bit guest
1505 .mii
1506 addl r2 = r2, areg0
1507 shl r3 = r3, cteb # via dep.z
1508 dep r1 = 0, r57, ... # zero page ofs, keep align
1510 .mmi
1511 add r2 = r2, r3
1513 ld4 r3 = [r2], off_add-off_rw # or ld8 for 64-bit guest
1516 .mmi
1518 cmp.eq p6, p7 = r3, r58
1522 tcg_out_bundle(s, miI,
1523 tcg_opc_movi_a(TCG_REG_P0, TCG_REG_R2, off_rw),
1524 tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11, TCG_REG_R3,
1525 addr_reg, TARGET_PAGE_BITS, CPU_TLB_BITS - 1),
1526 tcg_opc_ext_i(TCG_REG_P0,
1527 TARGET_LONG_BITS == 32 ? MO_UL : MO_Q,
1528 TCG_REG_R57, addr_reg));
1529 tcg_out_bundle(s, miI,
1530 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1531 TCG_REG_R2, TCG_AREG0),
1532 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12, TCG_REG_R3,
1533 TCG_REG_R3, 63 - CPU_TLB_ENTRY_BITS,
1534 63 - CPU_TLB_ENTRY_BITS),
1535 tcg_opc_i14(TCG_REG_P0, OPC_DEP_I14, TCG_REG_R1, 0,
1536 TCG_REG_R57, 63 - s_bits,
1537 TARGET_PAGE_BITS - s_bits - 1));
1538 tcg_out_bundle(s, MmI,
1539 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1,
1540 TCG_REG_R2, TCG_REG_R2, TCG_REG_R3),
1541 tcg_opc_m3 (TCG_REG_P0,
1542 (TARGET_LONG_BITS == 32
1543 ? OPC_LD4_M3 : OPC_LD8_M3), TCG_REG_R3,
1544 TCG_REG_R2, off_add - off_rw),
1545 bswap1);
1546 tcg_out_bundle(s, mmI,
1547 tcg_opc_m1 (TCG_REG_P0, OPC_LD8_M1, TCG_REG_R2, TCG_REG_R2),
1548 tcg_opc_a6 (TCG_REG_P0, OPC_CMP_EQ_A6, TCG_REG_P6,
1549 TCG_REG_P7, TCG_REG_R1, TCG_REG_R3),
1550 bswap2);
1553 #define TCG_MAX_QEMU_LDST 640
1555 typedef struct TCGLabelQemuLdst {
1556 bool is_ld;
1557 TCGMemOp size;
1558 tcg_insn_unit *label_ptr; /* label pointers to be updated */
1559 } TCGLabelQemuLdst;
1561 typedef struct TCGBackendData {
1562 int nb_ldst_labels;
1563 TCGLabelQemuLdst ldst_labels[TCG_MAX_QEMU_LDST];
1564 } TCGBackendData;
1566 static inline void tcg_out_tb_init(TCGContext *s)
1568 s->be->nb_ldst_labels = 0;
1571 static void add_qemu_ldst_label(TCGContext *s, bool is_ld, TCGMemOp opc,
1572 tcg_insn_unit *label_ptr)
1574 TCGBackendData *be = s->be;
1575 TCGLabelQemuLdst *l = &be->ldst_labels[be->nb_ldst_labels++];
1577 assert(be->nb_ldst_labels <= TCG_MAX_QEMU_LDST);
1578 l->is_ld = is_ld;
1579 l->size = opc & MO_SIZE;
1580 l->label_ptr = label_ptr;
1583 static void tcg_out_tb_finalize(TCGContext *s)
1585 static const void * const helpers[8] = {
1586 helper_ret_stb_mmu,
1587 helper_le_stw_mmu,
1588 helper_le_stl_mmu,
1589 helper_le_stq_mmu,
1590 helper_ret_ldub_mmu,
1591 helper_le_lduw_mmu,
1592 helper_le_ldul_mmu,
1593 helper_le_ldq_mmu,
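/* One thunk per helper: the table above is indexed as is_ld * 4 + size,
   so entries 0-3 are the store helpers by access size and entries 4-7 the
   load helpers, matching the lookup in the loop below. */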
1595 tcg_insn_unit *thunks[8] = { };
1596 TCGBackendData *be = s->be;
1597 size_t i, n = be->nb_ldst_labels;
1599 for (i = 0; i < n; i++) {
1600 TCGLabelQemuLdst *l = &be->ldst_labels[i];
1601 long x = l->is_ld * 4 + l->size;
1602 tcg_insn_unit *dest = thunks[x];
1604 /* The out-of-line thunks are all the same; load the return address
1605 from B0, load the GP, and branch to the code. Note that we are
1606            always post-call, so the register window has rolled; we are
1607            using incoming parameter register numbers, not outgoing. */
1608 if (dest == NULL) {
1609 uintptr_t *desc = (uintptr_t *)helpers[x];
1610 uintptr_t func = desc[0], gp = desc[1], disp;
1612 thunks[x] = dest = s->code_ptr;
1614 tcg_out_bundle(s, mlx,
1615 INSN_NOP_M,
1616 tcg_opc_l2 (gp),
1617 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
1618 TCG_REG_R1, gp));
1619 tcg_out_bundle(s, mii,
1620 INSN_NOP_M,
1621 INSN_NOP_I,
1622 tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
1623 l->is_ld ? TCG_REG_R35 : TCG_REG_R36,
1624 TCG_REG_B0));
1625 disp = (tcg_insn_unit *)func - s->code_ptr;
1626 tcg_out_bundle(s, mLX,
1627 INSN_NOP_M,
1628 tcg_opc_l3 (disp),
1629 tcg_opc_x3 (TCG_REG_P0, OPC_BRL_SPTK_MANY_X3, disp));
1632 reloc_pcrel21b_slot2(l->label_ptr, dest);
1636 static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
1638 static const uint64_t opc_ld_m1[4] = {
1639 OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
1641 int addr_reg, data_reg, mem_index;
1642 TCGMemOp opc, s_bits;
1643 uint64_t fin1, fin2;
1644 tcg_insn_unit *label_ptr;
1646 data_reg = args[0];
1647 addr_reg = args[1];
1648 opc = args[2];
1649 mem_index = args[3];
1650 s_bits = opc & MO_SIZE;
1652 /* Read the TLB entry */
1653 tcg_out_qemu_tlb(s, addr_reg, s_bits,
1654 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read),
1655 offsetof(CPUArchState, tlb_table[mem_index][0].addend),
1656 INSN_NOP_I, INSN_NOP_I);
1658 /* P6 is the fast path, and P7 the slow path */
1660 fin2 = 0;
1661 if (opc & MO_BSWAP) {
1662 fin1 = tcg_opc_bswap64_i(TCG_REG_P0, data_reg, TCG_REG_R8);
1663 if (s_bits < MO_64) {
1664 int shift = 64 - (8 << s_bits);
1665 fin2 = (opc & MO_SIGN ? OPC_EXTR_I11 : OPC_EXTR_U_I11);
1666 fin2 = tcg_opc_i11(TCG_REG_P0, fin2,
1667 data_reg, data_reg, shift, 63 - shift);
1669 } else {
1670 fin1 = tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, TCG_REG_R8);
1673 tcg_out_bundle(s, mmI,
1674 tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
1675 tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
1676 TCG_REG_R2, TCG_REG_R57),
1677 tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R58, mem_index));
1678 label_ptr = s->code_ptr;
1679 tcg_out_bundle(s, miB,
1680 tcg_opc_m1 (TCG_REG_P6, opc_ld_m1[s_bits],
1681 TCG_REG_R8, TCG_REG_R2),
1682 INSN_NOP_I,
1683 tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
1684 get_reloc_pcrel21b_slot2(label_ptr)));
1686 add_qemu_ldst_label(s, 1, opc, label_ptr);
1688 /* Note that we always use LE helper functions, so the bswap insns
1689 here for the fast path also apply to the slow path. */
1690 tcg_out_bundle(s, (fin2 ? mII : miI),
1691 INSN_NOP_M,
1692 fin1,
1693 fin2 ? fin2 : INSN_NOP_I);
1696 static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
1698 static const uint64_t opc_st_m4[4] = {
1699 OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
1701 TCGReg addr_reg, data_reg;
1702 int mem_index;
1703 uint64_t pre1, pre2;
1704 TCGMemOp opc, s_bits;
1705 tcg_insn_unit *label_ptr;
1707 data_reg = args[0];
1708 addr_reg = args[1];
1709 opc = args[2];
1710 mem_index = args[3];
1711 s_bits = opc & MO_SIZE;
1713 /* Note that we always use LE helper functions, so the bswap insns
1714 that are here for the fast path also apply to the slow path,
1715 and move the data into the argument register. */
1716 pre2 = INSN_NOP_I;
1717 if (opc & MO_BSWAP) {
1718 pre1 = tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R58, data_reg);
1719 if (s_bits < MO_64) {
1720 int shift = 64 - (8 << s_bits);
1721 pre2 = tcg_opc_i11(TCG_REG_P0, OPC_EXTR_U_I11,
1722 TCG_REG_R58, TCG_REG_R58, shift, 63 - shift);
1724 } else {
1725 /* Just move the data into place for the slow path. */
1726 pre1 = tcg_opc_ext_i(TCG_REG_P0, opc, TCG_REG_R58, data_reg);
1729 tcg_out_qemu_tlb(s, addr_reg, s_bits,
1730 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write),
1731 offsetof(CPUArchState, tlb_table[mem_index][0].addend),
1732 pre1, pre2);
1734 /* P6 is the fast path, and P7 the slow path */
1735 tcg_out_bundle(s, mmI,
1736 tcg_opc_mov_a(TCG_REG_P7, TCG_REG_R56, TCG_AREG0),
1737 tcg_opc_a1 (TCG_REG_P6, OPC_ADD_A1, TCG_REG_R2,
1738 TCG_REG_R2, TCG_REG_R57),
1739 tcg_opc_movi_a(TCG_REG_P7, TCG_REG_R59, mem_index));
1740 label_ptr = s->code_ptr;
1741 tcg_out_bundle(s, miB,
1742 tcg_opc_m4 (TCG_REG_P6, opc_st_m4[s_bits],
1743 TCG_REG_R58, TCG_REG_R2),
1744 INSN_NOP_I,
1745 tcg_opc_b3 (TCG_REG_P7, OPC_BR_CALL_SPNT_FEW_B3, TCG_REG_B0,
1746 get_reloc_pcrel21b_slot2(label_ptr)));
1748 add_qemu_ldst_label(s, 0, opc, label_ptr);
1751 #else /* !CONFIG_SOFTMMU */
1752 # include "tcg-be-null.h"
1754 static inline void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args)
1756 static uint64_t const opc_ld_m1[4] = {
1757 OPC_LD1_M1, OPC_LD2_M1, OPC_LD4_M1, OPC_LD8_M1
1759 int addr_reg, data_reg;
1760 TCGMemOp opc, s_bits, bswap;
1762 data_reg = args[0];
1763 addr_reg = args[1];
1764 opc = args[2];
1765 s_bits = opc & MO_SIZE;
1766 bswap = opc & MO_BSWAP;
1768 #if TARGET_LONG_BITS == 32
1769 if (GUEST_BASE != 0) {
1770 tcg_out_bundle(s, mII,
1771 INSN_NOP_M,
1772 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1773 TCG_REG_R3, addr_reg),
1774 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1775 TCG_GUEST_BASE_REG, TCG_REG_R3));
1776 } else {
1777 tcg_out_bundle(s, miI,
1778 INSN_NOP_M,
1779 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1780 TCG_REG_R2, addr_reg),
1781 INSN_NOP_I);
1784 if (!bswap) {
1785 if (!(opc & MO_SIGN)) {
1786 tcg_out_bundle(s, miI,
1787 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1788 data_reg, TCG_REG_R2),
1789 INSN_NOP_I,
1790 INSN_NOP_I);
1791 } else {
1792 tcg_out_bundle(s, mII,
1793 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1794 data_reg, TCG_REG_R2),
1795 INSN_NOP_I,
1796 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1798 } else if (s_bits == MO_64) {
1799 tcg_out_bundle(s, mII,
1800 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1801 data_reg, TCG_REG_R2),
1802 INSN_NOP_I,
1803 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1804 } else {
1805 if (s_bits == MO_16) {
1806 tcg_out_bundle(s, mII,
1807 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1808 data_reg, TCG_REG_R2),
1809 INSN_NOP_I,
1810 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1811 data_reg, data_reg, 15, 15));
1812 } else {
1813 tcg_out_bundle(s, mII,
1814 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1815 data_reg, TCG_REG_R2),
1816 INSN_NOP_I,
1817 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1818 data_reg, data_reg, 31, 31));
1820 if (!(opc & MO_SIGN)) {
1821 tcg_out_bundle(s, miI,
1822 INSN_NOP_M,
1823 INSN_NOP_I,
1824 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1825 } else {
1826 tcg_out_bundle(s, mII,
1827 INSN_NOP_M,
1828 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg),
1829 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1832 #else
1833 if (GUEST_BASE != 0) {
1834 tcg_out_bundle(s, MmI,
1835 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1836 TCG_GUEST_BASE_REG, addr_reg),
1837 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1838 data_reg, TCG_REG_R2),
1839 INSN_NOP_I);
1840 } else {
1841 tcg_out_bundle(s, mmI,
1842 INSN_NOP_M,
1843 tcg_opc_m1 (TCG_REG_P0, opc_ld_m1[s_bits],
1844 data_reg, addr_reg),
1845 INSN_NOP_I);
1848 if (bswap && s_bits == MO_16) {
1849 tcg_out_bundle(s, mII,
1850 INSN_NOP_M,
1851 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1852 data_reg, data_reg, 15, 15),
1853 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1854 } else if (bswap && s_bits == MO_32) {
1855 tcg_out_bundle(s, mII,
1856 INSN_NOP_M,
1857 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1858 data_reg, data_reg, 31, 31),
1859 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1860 } else if (bswap && s_bits == MO_64) {
1861 tcg_out_bundle(s, miI,
1862 INSN_NOP_M,
1863 INSN_NOP_I,
1864 tcg_opc_bswap64_i(TCG_REG_P0, data_reg, data_reg));
1866 if (opc & MO_SIGN) {
1867 tcg_out_bundle(s, miI,
1868 INSN_NOP_M,
1869 INSN_NOP_I,
1870 tcg_opc_ext_i(TCG_REG_P0, opc, data_reg, data_reg));
1872 #endif
1875 static inline void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
1877 static uint64_t const opc_st_m4[4] = {
1878 OPC_ST1_M4, OPC_ST2_M4, OPC_ST4_M4, OPC_ST8_M4
1880 int addr_reg, data_reg;
1881 #if TARGET_LONG_BITS == 64
1882 uint64_t add_guest_base;
1883 #endif
1884 TCGMemOp opc, s_bits, bswap;
1886 data_reg = args[0];
1887 addr_reg = args[1];
1888 opc = args[2];
1889 s_bits = opc & MO_SIZE;
1890 bswap = opc & MO_BSWAP;
1892 #if TARGET_LONG_BITS == 32
1893 if (GUEST_BASE != 0) {
1894 tcg_out_bundle(s, mII,
1895 INSN_NOP_M,
1896 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1897 TCG_REG_R3, addr_reg),
1898 tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1899 TCG_GUEST_BASE_REG, TCG_REG_R3));
1900 } else {
1901 tcg_out_bundle(s, miI,
1902 INSN_NOP_M,
1903 tcg_opc_i29(TCG_REG_P0, OPC_ZXT4_I29,
1904 TCG_REG_R2, addr_reg),
1905 INSN_NOP_I);
1908 if (bswap) {
1909 if (s_bits == MO_16) {
1910 tcg_out_bundle(s, mII,
1911 INSN_NOP_M,
1912 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1913 TCG_REG_R3, data_reg, 15, 15),
1914 tcg_opc_bswap64_i(TCG_REG_P0,
1915 TCG_REG_R3, TCG_REG_R3));
1916 data_reg = TCG_REG_R3;
1917 } else if (s_bits == MO_32) {
1918 tcg_out_bundle(s, mII,
1919 INSN_NOP_M,
1920 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1921 TCG_REG_R3, data_reg, 31, 31),
1922 tcg_opc_bswap64_i(TCG_REG_P0,
1923 TCG_REG_R3, TCG_REG_R3));
1924 data_reg = TCG_REG_R3;
1925 } else if (s_bits == MO_64) {
1926 tcg_out_bundle(s, miI,
1927 INSN_NOP_M,
1928 INSN_NOP_I,
1929 tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
1930 data_reg = TCG_REG_R3;
1933 tcg_out_bundle(s, mmI,
1934 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1935 data_reg, TCG_REG_R2),
1936 INSN_NOP_M,
1937 INSN_NOP_I);
1938 #else
1939 if (GUEST_BASE != 0) {
1940 add_guest_base = tcg_opc_a1 (TCG_REG_P0, OPC_ADD_A1, TCG_REG_R2,
1941 TCG_GUEST_BASE_REG, addr_reg);
1942 addr_reg = TCG_REG_R2;
1943 } else {
1944 add_guest_base = INSN_NOP_M;
1947 if (!bswap) {
1948 tcg_out_bundle(s, (GUEST_BASE ? MmI : mmI),
1949 add_guest_base,
1950 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1951 data_reg, addr_reg),
1952 INSN_NOP_I);
1953 } else {
1954 if (s_bits == MO_16) {
1955 tcg_out_bundle(s, mII,
1956 add_guest_base,
1957 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1958 TCG_REG_R3, data_reg, 15, 15),
1959 tcg_opc_bswap64_i(TCG_REG_P0,
1960 TCG_REG_R3, TCG_REG_R3));
1961 data_reg = TCG_REG_R3;
1962 } else if (s_bits == MO_32) {
1963 tcg_out_bundle(s, mII,
1964 add_guest_base,
1965 tcg_opc_i12(TCG_REG_P0, OPC_DEP_Z_I12,
1966 TCG_REG_R3, data_reg, 31, 31),
1967 tcg_opc_bswap64_i(TCG_REG_P0,
1968 TCG_REG_R3, TCG_REG_R3));
1969 data_reg = TCG_REG_R3;
1970 } else if (s_bits == MO_64) {
1971 tcg_out_bundle(s, miI,
1972 add_guest_base,
1973 INSN_NOP_I,
1974 tcg_opc_bswap64_i(TCG_REG_P0, TCG_REG_R3, data_reg));
1975 data_reg = TCG_REG_R3;
1977 tcg_out_bundle(s, miI,
1978 tcg_opc_m4 (TCG_REG_P0, opc_st_m4[s_bits],
1979 data_reg, addr_reg),
1980 INSN_NOP_I,
1981 INSN_NOP_I);
1983 #endif
1986 #endif
1988 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
1989 const TCGArg *args, const int *const_args)
1991 switch(opc) {
1992 case INDEX_op_exit_tb:
1993 tcg_out_exit_tb(s, args[0]);
1994 break;
1995 case INDEX_op_br:
1996 tcg_out_br(s, args[0]);
1997 break;
1998 case INDEX_op_goto_tb:
1999 tcg_out_goto_tb(s, args[0]);
2000 break;
2002 case INDEX_op_ld8u_i32:
2003 case INDEX_op_ld8u_i64:
2004 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
2005 break;
2006 case INDEX_op_ld8s_i32:
2007 case INDEX_op_ld8s_i64:
2008 tcg_out_ld_rel(s, OPC_LD1_M1, args[0], args[1], args[2]);
2009 tcg_out_ext(s, OPC_SXT1_I29, args[0], args[0]);
2010 break;
2011 case INDEX_op_ld16u_i32:
2012 case INDEX_op_ld16u_i64:
2013 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2014 break;
2015 case INDEX_op_ld16s_i32:
2016 case INDEX_op_ld16s_i64:
2017 tcg_out_ld_rel(s, OPC_LD2_M1, args[0], args[1], args[2]);
2018 tcg_out_ext(s, OPC_SXT2_I29, args[0], args[0]);
2019 break;
2020 case INDEX_op_ld_i32:
2021 case INDEX_op_ld32u_i64:
2022 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2023 break;
2024 case INDEX_op_ld32s_i64:
2025 tcg_out_ld_rel(s, OPC_LD4_M1, args[0], args[1], args[2]);
2026 tcg_out_ext(s, OPC_SXT4_I29, args[0], args[0]);
2027 break;
2028 case INDEX_op_ld_i64:
2029 tcg_out_ld_rel(s, OPC_LD8_M1, args[0], args[1], args[2]);
2030 break;
2031 case INDEX_op_st8_i32:
2032 case INDEX_op_st8_i64:
2033 tcg_out_st_rel(s, OPC_ST1_M4, args[0], args[1], args[2]);
2034 break;
2035 case INDEX_op_st16_i32:
2036 case INDEX_op_st16_i64:
2037 tcg_out_st_rel(s, OPC_ST2_M4, args[0], args[1], args[2]);
2038 break;
2039 case INDEX_op_st_i32:
2040 case INDEX_op_st32_i64:
2041 tcg_out_st_rel(s, OPC_ST4_M4, args[0], args[1], args[2]);
2042 break;
2043 case INDEX_op_st_i64:
2044 tcg_out_st_rel(s, OPC_ST8_M4, args[0], args[1], args[2]);
2045 break;
2047 case INDEX_op_add_i32:
2048 case INDEX_op_add_i64:
2049 tcg_out_add(s, args[0], args[1], args[2], const_args[2]);
2050 break;
2051 case INDEX_op_sub_i32:
2052 case INDEX_op_sub_i64:
2053 tcg_out_sub(s, args[0], args[1], const_args[1], args[2], const_args[2]);
2054 break;
2056 case INDEX_op_and_i32:
2057 case INDEX_op_and_i64:
2058 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2059 tcg_out_alu(s, OPC_AND_A1, OPC_AND_A3, args[0],
2060 args[2], const_args[2], args[1], const_args[1]);
2061 break;
2062 case INDEX_op_andc_i32:
2063 case INDEX_op_andc_i64:
2064 tcg_out_alu(s, OPC_ANDCM_A1, OPC_ANDCM_A3, args[0],
2065 args[1], const_args[1], args[2], const_args[2]);
2066 break;
2067 case INDEX_op_eqv_i32:
2068 case INDEX_op_eqv_i64:
2069 tcg_out_eqv(s, args[0], args[1], const_args[1],
2070 args[2], const_args[2]);
2071 break;
2072 case INDEX_op_nand_i32:
2073 case INDEX_op_nand_i64:
2074 tcg_out_nand(s, args[0], args[1], const_args[1],
2075 args[2], const_args[2]);
2076 break;
2077 case INDEX_op_nor_i32:
2078 case INDEX_op_nor_i64:
2079 tcg_out_nor(s, args[0], args[1], const_args[1],
2080 args[2], const_args[2]);
2081 break;
2082 case INDEX_op_or_i32:
2083 case INDEX_op_or_i64:
2084 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2085 tcg_out_alu(s, OPC_OR_A1, OPC_OR_A3, args[0],
2086 args[2], const_args[2], args[1], const_args[1]);
2087 break;
2088 case INDEX_op_orc_i32:
2089 case INDEX_op_orc_i64:
2090 tcg_out_orc(s, args[0], args[1], const_args[1],
2091 args[2], const_args[2]);
2092 break;
2093 case INDEX_op_xor_i32:
2094 case INDEX_op_xor_i64:
2095 /* TCG expects arg2 constant; A3 expects arg1 constant. Swap. */
2096 tcg_out_alu(s, OPC_XOR_A1, OPC_XOR_A3, args[0],
2097 args[2], const_args[2], args[1], const_args[1]);
2098 break;
2100 case INDEX_op_mul_i32:
2101 case INDEX_op_mul_i64:
2102 tcg_out_mul(s, args[0], args[1], args[2]);
2103 break;
2105 case INDEX_op_sar_i32:
2106 tcg_out_sar_i32(s, args[0], args[1], args[2], const_args[2]);
2107 break;
2108 case INDEX_op_sar_i64:
2109 tcg_out_sar_i64(s, args[0], args[1], args[2], const_args[2]);
2110 break;
2111 case INDEX_op_shl_i32:
2112 tcg_out_shl_i32(s, args[0], args[1], args[2], const_args[2]);
2113 break;
2114 case INDEX_op_shl_i64:
2115 tcg_out_shl_i64(s, args[0], args[1], args[2], const_args[2]);
2116 break;
2117 case INDEX_op_shr_i32:
2118 tcg_out_shr_i32(s, args[0], args[1], args[2], const_args[2]);
2119 break;
2120 case INDEX_op_shr_i64:
2121 tcg_out_shr_i64(s, args[0], args[1], args[2], const_args[2]);
2122 break;
2123 case INDEX_op_rotl_i32:
2124 tcg_out_rotl_i32(s, args[0], args[1], args[2], const_args[2]);
2125 break;
2126 case INDEX_op_rotl_i64:
2127 tcg_out_rotl_i64(s, args[0], args[1], args[2], const_args[2]);
2128 break;
2129 case INDEX_op_rotr_i32:
2130 tcg_out_rotr_i32(s, args[0], args[1], args[2], const_args[2]);
2131 break;
2132 case INDEX_op_rotr_i64:
2133 tcg_out_rotr_i64(s, args[0], args[1], args[2], const_args[2]);
2134 break;
2136 case INDEX_op_ext8s_i32:
2137 case INDEX_op_ext8s_i64:
2138 tcg_out_ext(s, OPC_SXT1_I29, args[0], args[1]);
2139 break;
2140 case INDEX_op_ext8u_i32:
2141 case INDEX_op_ext8u_i64:
2142 tcg_out_ext(s, OPC_ZXT1_I29, args[0], args[1]);
2143 break;
2144 case INDEX_op_ext16s_i32:
2145 case INDEX_op_ext16s_i64:
2146 tcg_out_ext(s, OPC_SXT2_I29, args[0], args[1]);
2147 break;
2148 case INDEX_op_ext16u_i32:
2149 case INDEX_op_ext16u_i64:
2150 tcg_out_ext(s, OPC_ZXT2_I29, args[0], args[1]);
2151 break;
2152 case INDEX_op_ext32s_i64:
2153 tcg_out_ext(s, OPC_SXT4_I29, args[0], args[1]);
2154 break;
2155 case INDEX_op_ext32u_i64:
2156 tcg_out_ext(s, OPC_ZXT4_I29, args[0], args[1]);
2157 break;
2159 case INDEX_op_bswap16_i32:
2160 case INDEX_op_bswap16_i64:
2161 tcg_out_bswap16(s, args[0], args[1]);
2162 break;
2163 case INDEX_op_bswap32_i32:
2164 case INDEX_op_bswap32_i64:
2165 tcg_out_bswap32(s, args[0], args[1]);
2166 break;
2167 case INDEX_op_bswap64_i64:
2168 tcg_out_bswap64(s, args[0], args[1]);
2169 break;
2171 case INDEX_op_deposit_i32:
2172 case INDEX_op_deposit_i64:
2173 tcg_out_deposit(s, args[0], args[1], args[2], const_args[2],
2174 args[3], args[4]);
2175 break;
2177 case INDEX_op_brcond_i32:
2178 tcg_out_brcond(s, args[2], args[0], args[1], args[3], 1);
2179 break;
2180 case INDEX_op_brcond_i64:
2181 tcg_out_brcond(s, args[2], args[0], args[1], args[3], 0);
2182 break;
2183 case INDEX_op_setcond_i32:
2184 tcg_out_setcond(s, args[3], args[0], args[1], args[2], 1);
2185 break;
2186 case INDEX_op_setcond_i64:
2187 tcg_out_setcond(s, args[3], args[0], args[1], args[2], 0);
2188 break;
2189 case INDEX_op_movcond_i32:
2190 tcg_out_movcond(s, args[5], args[0], args[1], args[2],
2191 args[3], const_args[3], args[4], const_args[4], 1);
2192 break;
2193 case INDEX_op_movcond_i64:
2194 tcg_out_movcond(s, args[5], args[0], args[1], args[2],
2195 args[3], const_args[3], args[4], const_args[4], 0);
2196 break;
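/* Guest memory accesses.  tcg_out_qemu_ld/st emit the address
   computation (a softmmu TLB lookup, or a GUEST_BASE offset for
   user-mode emulation) followed by the actual load or store. */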
2198 case INDEX_op_qemu_ld_i32:
2199 tcg_out_qemu_ld(s, args);
2200 break;
2201 case INDEX_op_qemu_ld_i64:
2202 tcg_out_qemu_ld(s, args);
2203 break;
2204 case INDEX_op_qemu_st_i32:
2205 tcg_out_qemu_st(s, args);
2206 break;
2207 case INDEX_op_qemu_st_i64:
2208 tcg_out_qemu_st(s, args);
2209 break;
2211 case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
2212 case INDEX_op_mov_i64:
2213 case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
2214 case INDEX_op_movi_i64:
2215 case INDEX_op_call: /* Always emitted via tcg_out_call. */
2216 default:
2217 tcg_abort();
2218 }
2219 }
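/* Operand constraints for the opcodes handled above.  Roughly: "r" is any
   general register, "Z" additionally allows the constant zero (emitted as
   r0), "I" a small sign-extended immediate, "i" any immediate, and "S" is
   like "r" but, with softmmu, excludes the registers needed for the store
   helper call; see target_parse_constraint() for the authoritative
   definitions.  For example, { INDEX_op_add_i32, { "r", "rZ", "rI" } }
   reads: result in any register, first input a register or zero, second
   input a register or small immediate. */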
2221 static const TCGTargetOpDef ia64_op_defs[] = {
2222 { INDEX_op_br, { } },
2223 { INDEX_op_exit_tb, { } },
2224 { INDEX_op_goto_tb, { } },
2226 { INDEX_op_ld8u_i32, { "r", "r" } },
2227 { INDEX_op_ld8s_i32, { "r", "r" } },
2228 { INDEX_op_ld16u_i32, { "r", "r" } },
2229 { INDEX_op_ld16s_i32, { "r", "r" } },
2230 { INDEX_op_ld_i32, { "r", "r" } },
2231 { INDEX_op_st8_i32, { "rZ", "r" } },
2232 { INDEX_op_st16_i32, { "rZ", "r" } },
2233 { INDEX_op_st_i32, { "rZ", "r" } },
2235 { INDEX_op_add_i32, { "r", "rZ", "rI" } },
2236 { INDEX_op_sub_i32, { "r", "rI", "rI" } },
2238 { INDEX_op_and_i32, { "r", "rI", "rI" } },
2239 { INDEX_op_andc_i32, { "r", "rI", "rI" } },
2240 { INDEX_op_eqv_i32, { "r", "rZ", "rZ" } },
2241 { INDEX_op_nand_i32, { "r", "rZ", "rZ" } },
2242 { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
2243 { INDEX_op_or_i32, { "r", "rI", "rI" } },
2244 { INDEX_op_orc_i32, { "r", "rZ", "rZ" } },
2245 { INDEX_op_xor_i32, { "r", "rI", "rI" } },
2247 { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
2249 { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
2250 { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
2251 { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
2252 { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },
2253 { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },
2255 { INDEX_op_ext8s_i32, { "r", "rZ"} },
2256 { INDEX_op_ext8u_i32, { "r", "rZ"} },
2257 { INDEX_op_ext16s_i32, { "r", "rZ"} },
2258 { INDEX_op_ext16u_i32, { "r", "rZ"} },
2260 { INDEX_op_bswap16_i32, { "r", "rZ" } },
2261 { INDEX_op_bswap32_i32, { "r", "rZ" } },
2263 { INDEX_op_brcond_i32, { "rZ", "rZ" } },
2264 { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
2265 { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rI", "rI" } },
2267 { INDEX_op_ld8u_i64, { "r", "r" } },
2268 { INDEX_op_ld8s_i64, { "r", "r" } },
2269 { INDEX_op_ld16u_i64, { "r", "r" } },
2270 { INDEX_op_ld16s_i64, { "r", "r" } },
2271 { INDEX_op_ld32u_i64, { "r", "r" } },
2272 { INDEX_op_ld32s_i64, { "r", "r" } },
2273 { INDEX_op_ld_i64, { "r", "r" } },
2274 { INDEX_op_st8_i64, { "rZ", "r" } },
2275 { INDEX_op_st16_i64, { "rZ", "r" } },
2276 { INDEX_op_st32_i64, { "rZ", "r" } },
2277 { INDEX_op_st_i64, { "rZ", "r" } },
2279 { INDEX_op_add_i64, { "r", "rZ", "rI" } },
2280 { INDEX_op_sub_i64, { "r", "rI", "rI" } },
2282 { INDEX_op_and_i64, { "r", "rI", "rI" } },
2283 { INDEX_op_andc_i64, { "r", "rI", "rI" } },
2284 { INDEX_op_eqv_i64, { "r", "rZ", "rZ" } },
2285 { INDEX_op_nand_i64, { "r", "rZ", "rZ" } },
2286 { INDEX_op_nor_i64, { "r", "rZ", "rZ" } },
2287 { INDEX_op_or_i64, { "r", "rI", "rI" } },
2288 { INDEX_op_orc_i64, { "r", "rZ", "rZ" } },
2289 { INDEX_op_xor_i64, { "r", "rI", "rI" } },
2291 { INDEX_op_mul_i64, { "r", "rZ", "rZ" } },
2293 { INDEX_op_sar_i64, { "r", "rZ", "ri" } },
2294 { INDEX_op_shl_i64, { "r", "rZ", "ri" } },
2295 { INDEX_op_shr_i64, { "r", "rZ", "ri" } },
2296 { INDEX_op_rotl_i64, { "r", "rZ", "ri" } },
2297 { INDEX_op_rotr_i64, { "r", "rZ", "ri" } },
2299 { INDEX_op_ext8s_i64, { "r", "rZ"} },
2300 { INDEX_op_ext8u_i64, { "r", "rZ"} },
2301 { INDEX_op_ext16s_i64, { "r", "rZ"} },
2302 { INDEX_op_ext16u_i64, { "r", "rZ"} },
2303 { INDEX_op_ext32s_i64, { "r", "rZ"} },
2304 { INDEX_op_ext32u_i64, { "r", "rZ"} },
2306 { INDEX_op_bswap16_i64, { "r", "rZ" } },
2307 { INDEX_op_bswap32_i64, { "r", "rZ" } },
2308 { INDEX_op_bswap64_i64, { "r", "rZ" } },
2310 { INDEX_op_brcond_i64, { "rZ", "rZ" } },
2311 { INDEX_op_setcond_i64, { "r", "rZ", "rZ" } },
2312 { INDEX_op_movcond_i64, { "r", "rZ", "rZ", "rI", "rI" } },
2314 { INDEX_op_deposit_i32, { "r", "rZ", "ri" } },
2315 { INDEX_op_deposit_i64, { "r", "rZ", "ri" } },
2317 { INDEX_op_qemu_ld_i32, { "r", "r" } },
2318 { INDEX_op_qemu_ld_i64, { "r", "r" } },
2319 { INDEX_op_qemu_st_i32, { "SZ", "r" } },
2320 { INDEX_op_qemu_st_i64, { "SZ", "r" } },
2322 { -1 },
2323 };
2325 /* Generate global QEMU prologue and epilogue code */
2326 static void tcg_target_qemu_prologue(TCGContext *s)
2327 {
2328 int frame_size;
2330 /* reserve some stack space */
2331 frame_size = TCG_STATIC_CALL_ARGS_SIZE +
2332 CPU_TEMP_BUF_NLONGS * sizeof(long);
2333 frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
2334 ~(TCG_TARGET_STACK_ALIGN - 1);
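/* i.e. round frame_size up to the next multiple of TCG_TARGET_STACK_ALIGN
   (with the usual 16-byte alignment, a raw size of 200 becomes 208). */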
2335 tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
2336 CPU_TEMP_BUF_NLONGS * sizeof(long));
2338 /* First emit ad hoc function descriptor */
2339 *s->code_ptr = (tcg_insn_unit){
2340 (uint64_t)(s->code_ptr + 1), /* entry point */
2341 0 /* skip gp */
2342 };
2343 s->code_ptr++;
2345 /* prologue */
2346 tcg_out_bundle(s, miI,
2347 tcg_opc_m34(TCG_REG_P0, OPC_ALLOC_M34,
2348 TCG_REG_R34, 32, 24, 0),
2349 INSN_NOP_I,
2350 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
2351 TCG_REG_B6, TCG_REG_R33, 0));
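/* alloc saves ar.pfs into r34 and sets up the register stack frame; the
   address of the translated block, passed as the second argument (r33),
   is moved to b6 for the indirect branch in the final prologue bundle. */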
2353 /* ??? If GUEST_BASE < 0x200000, we could load the register via
2354 an ADDL in the M slot of the next bundle. */
2355 if (GUEST_BASE != 0) {
2356 tcg_out_bundle(s, mlx,
2357 INSN_NOP_M,
2358 tcg_opc_l2 (GUEST_BASE),
2359 tcg_opc_x2 (TCG_REG_P0, OPC_MOVL_X2,
2360 TCG_GUEST_BASE_REG, GUEST_BASE));
2361 tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
2362 }
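/* Allocate the stack frame, save the return address (b0) in r33, and
   branch to the translated block through b6. */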
2364 tcg_out_bundle(s, miB,
2365 tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
2366 TCG_REG_R12, -frame_size, TCG_REG_R12),
2367 tcg_opc_i22(TCG_REG_P0, OPC_MOV_I22,
2368 TCG_REG_R33, TCG_REG_B0),
2369 tcg_opc_b4 (TCG_REG_P0, OPC_BR_SPTK_MANY_B4, TCG_REG_B6));
2371 /* epilogue */
2372 tb_ret_addr = s->code_ptr;
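/* exit_tb branches back here: restore b0 from r33, pop the stack frame,
   restore ar.pfs from r34 and return to the caller of the prologue. */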
2373 tcg_out_bundle(s, miI,
2374 INSN_NOP_M,
2375 tcg_opc_i21(TCG_REG_P0, OPC_MOV_I21,
2376 TCG_REG_B0, TCG_REG_R33, 0),
2377 tcg_opc_a4 (TCG_REG_P0, OPC_ADDS_A4,
2378 TCG_REG_R12, frame_size, TCG_REG_R12));
2379 tcg_out_bundle(s, miB,
2380 INSN_NOP_M,
2381 tcg_opc_i26(TCG_REG_P0, OPC_MOV_I_I26,
2382 TCG_REG_PFS, TCG_REG_R34),
2383 tcg_opc_b4 (TCG_REG_P0, OPC_BR_RET_SPTK_MANY_B4,
2384 TCG_REG_B0));
2385 }
2387 static void tcg_target_init(TCGContext *s)
2388 {
2389 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32],
2390 0xffffffffffffffffull);
2391 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I64],
2392 0xffffffffffffffffull);
2394 tcg_regset_clear(tcg_target_call_clobber_regs);
2395 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R8);
2396 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R9);
2397 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R10);
2398 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R11);
2399 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R14);
2400 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R15);
2401 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R16);
2402 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R17);
2403 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R18);
2404 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R19);
2405 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R20);
2406 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R21);
2407 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R22);
2408 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R23);
2409 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R24);
2410 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R25);
2411 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R26);
2412 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R27);
2413 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R28);
2414 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R29);
2415 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R30);
2416 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R31);
2417 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R56);
2418 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R57);
2419 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R58);
2420 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R59);
2421 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R60);
2422 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R61);
2423 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R62);
2424 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_R63);
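/* r8-r11 and r14-r31 are scratch/return registers in the IA-64 calling
   convention, and r56-r63 are presumably the outgoing argument slots of
   the register window set up by the prologue; all of them may be
   clobbered by a call.  The remaining registers are either callee-saved
   or reserved below. */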
2426 tcg_regset_clear(s->reserved_regs);
2427 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0); /* zero register */
2428 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1); /* global pointer */
2429 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R2); /* internal use */
2430 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3); /* internal use */
2431 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R12); /* stack pointer */
2432 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R13); /* thread pointer */
2433 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R33); /* return address */
2434 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R34); /* PFS */
2436 /* The following 4 are not in use, are call-saved, but *not* saved
2437 by the prologue. Therefore we cannot use them without modifying
2438 the prologue. There doesn't seem to be any good reason to use
2439 these as opposed to the windowed registers. */
2440 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R4);
2441 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R5);
2442 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R6);
2443 tcg_regset_set_reg(s->reserved_regs, TCG_REG_R7);
2445 tcg_add_target_add_op_defs(ia64_op_defs);
2446 }